From 5ba30a8badd95fe13f6f12e19371c4f849f8d2fa Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Fri, 14 Oct 2022 11:22:18 -0700 Subject: [PATCH 001/413] Added tagging of converted files --- plugins/module_utils/encode.py | 33 ++++++++++++++++++++++++++++++++- plugins/modules/zos_encode.py | 1 + 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index fdaeacdb4..9b5c44a02 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2022 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -453,6 +453,25 @@ def mvs_convert_encoding( return convert_rc + def uss_tag_encoding(self, file_path, tag): + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + + Raises: + TaggingError: When the chtag command fails. 
+ """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) + class EncodeError(Exception): def __init__(self, message): @@ -460,6 +479,18 @@ def __init__(self, message): super(EncodeError, self).__init__(self.msg) +class TaggingError(Exception): + def __init__(self, file_path, tag, rc, stdout, stderr): + self.msg = 'An error occurred during tagging of {0} to {1}'.format( + file_path, + tag + ) + self.rc = rc + self.stdout = stdout + self.stderr = stderr + super(TaggingError, self).__init__(self.msg) + + class MoveFileError(Exception): def __init__(self, src, dest, e): self.msg = "Failed when moving {0} to {1}: {2}".format(src, dest, e) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 2f28768f4..21ca9ae63 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -494,6 +494,7 @@ def run_module(): convert_rc = eu.uss_convert_encoding_prev( src, dest, from_encoding, to_encoding ) + eu.uss_tag_encoding(dest, to_encoding) else: convert_rc = eu.mvs_convert_encoding( src, From 13e2c3bc314feda59b12031d414302070cbb7007 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 18 Oct 2022 09:10:07 -0700 Subject: [PATCH 002/413] Updated tests for zos_encode --- plugins/modules/zos_encode.py | 13 +++++- .../modules/test_zos_encode_func.py | 41 +++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 21ca9ae63..0fdbb47e5 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -494,7 +494,6 @@ def run_module(): convert_rc = eu.uss_convert_encoding_prev( src, dest, from_encoding, to_encoding ) - eu.uss_tag_encoding(dest, to_encoding) else: convert_rc = eu.mvs_convert_encoding( src, @@ -506,10 +505,22 @@ def run_module(): ) if 
convert_rc: + if is_uss_dest: + eu.uss_tag_encoding(dest, to_encoding) + changed = True result = dict(changed=changed, src=src, dest=dest, backup_name=backup_name) else: result = dict(src=src, dest=dest, changed=changed, backup_name=backup_name) + except encode.TaggingError as e: + module.fail_json( + msg=e.msg, + rc=e.rc, + stdout=e.stdout, + stderr=e.stderr, + stdout_lines=e.stdout.splitlines(), + stderr_lines=e.stderr.splitlines(), + ) except Exception as e: module.fail_json(msg=repr(e), **result) diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 970fc8944..5f1e8cfbf 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -14,6 +14,7 @@ from __future__ import absolute_import, division, print_function from shellescape import quote from pprint import pprint +from os import path __metaclass__ = type @@ -126,6 +127,10 @@ def test_uss_encoding_conversion_without_dest(ansible_zos_module): assert result.get("dest") == USS_FILE assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") @@ -149,8 +154,13 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): assert result.get("dest") == USS_NONE_FILE assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_NONE_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=USS_NONE_FILE, state="absent") def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): @@ -193,6 +203,10 @@ def 
test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") hosts.all.file(path=USS_DEST_FILE, state="absent") @@ -217,6 +231,10 @@ def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}/{1}".format(USS_DEST_PATH, path.basename(USS_FILE))) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") hosts.all.file(path=USS_DEST_PATH, state="absent") @@ -244,6 +262,12 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is not None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_PATH)) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") + assert TO_ENCODING not in result.get("stdout") + assert "untagged" not in result.get("stdout") finally: hosts.all.file(path=USS_PATH, state="absent") hosts.all.file(path=USS_DEST_PATH, state="absent") @@ -292,6 +316,10 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is not None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: 
hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") @@ -369,6 +397,10 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is not None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") @@ -417,6 +449,11 @@ def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_PATH)) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") + assert "untagged" not in result.get("stdout") finally: hosts.all.file(path=USS_DEST_PATH, state="absent") @@ -498,6 +535,10 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): # print(cat_result.contacted.values()) # for uss_file_result in cat_result.contacted.values(): # assert TEST_DATA in uss_file_result.get("stdout") + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") From 2d5c8c3dba91562584b1cdee021ffbd263b4d428 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 24 Oct 2022 17:34:49 -0700 Subject: [PATCH 003/413] Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. 
This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. --- plugins/modules/zos_copy.py | 162 +++++++++++++++++++++++++----------- 1 file changed, 113 insertions(+), 49 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6d7fd98bc..9c3ae782d 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1259,6 +1259,8 @@ def copy_to_pdse( dest_member {str, optional} -- Name of destination member in data set """ new_src = conv_path or temp_path or src + src_members = [] + dest_members = [] if src_ds_type == "USS": if os.path.isfile(new_src): @@ -1267,36 +1269,17 @@ def copy_to_pdse( else: path, dirs, files = next(os.walk(new_src)) - for file in files: - full_file_path = os.path.normpath(path + "/" + file) - - if dest_member: - dest_copy_name = "{0}({1})".format(dest, dest_member) - else: - dest_copy_name = "{0}({1})".format(dest, data_set.DataSet.get_member_name_from_file(file)) - - result = self.copy_to_member(full_file_path, dest_copy_name) + src_members = [os.path.normpath("{0}/{1}".format(path, file)) for file in files] + dest_members = [ + dest_member if dest_member + else data_set.DataSet.get_member_name_from_file(file) + for file in files + ] - if result["rc"] != 0: - msg = "Unable to copy file {0} to data set member {1}".format(file, dest_copy_name) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"] - ) elif src_ds_type in data_set.DataSet.MVS_SEQ: - dest_copy_name = "{0}({1})".format(dest, dest_member) - result = self.copy_to_member(new_src, dest_copy_name) + src_members = [new_src] + dest_members = [dest_member] - if result["rc"] != 0: - msg = "Unable to copy data set {0} to data set member {1}".format(new_src, dest_copy_name) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"] - ) else: members = [] src_data_set_name = data_set.extract_dsname(new_src) @@ 
-1306,23 +1289,39 @@ def copy_to_pdse( else: members = datasets.list_members(new_src) - for member in members: - copy_src = "{0}({1})".format(src_data_set_name, member) - if dest_member: - dest_copy_name = "{0}({1})".format(dest, dest_member) - else: - dest_copy_name = "{0}({1})".format(dest, member) + src_members = ["{0}({1})".format(src_data_set_name, member) for member in members] + dest_members = [ + dest_member if dest_member + else member + for member in members + ] - result = self.copy_to_member(copy_src, dest_copy_name) + existing_members = datasets.list_members(dest) + overwritten_members = [] + new_members = [] - if result["rc"] != 0: - msg = "Unable to copy data set member {0} to data set member {1}".format(new_src, dest_copy_name) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"] - ) + for src_member, destination_member in zip(src_members, dest_members): + if destination_member in existing_members: + overwritten_members.append(destination_member) + else: + new_members.append(destination_member) + + result = self.copy_to_member(src_member, "{0}({1})".format(dest, destination_member)) + + if result["rc"] != 0: + msg = "Unable to copy source {0} to data set member {1}({2})".format( + new_src, + dest, + destination_member + ) + raise CopyOperationError( + msg=msg, + rc=result["rc"], + stdout=result["out"], + stderr=result["err"], + overwritten_members=overwritten_members, + new_members=new_members + ) def copy_to_member( self, @@ -1558,7 +1557,15 @@ def backup_data(ds_name, ds_type, backup_name, tmphlq=None): ) -def restore_backup(dest, backup, dest_type, use_backup, volume=None): +def restore_backup( + dest, + backup, + dest_type, + use_backup, + volume=None, + members_to_restore=None, + members_to_delete=None +): """Restores a destination file/directory/data set by using a given backup. 
Arguments: @@ -1569,6 +1576,10 @@ def restore_backup(dest, backup, dest_type, use_backup, volume=None): tries to use an empty data set, and in that case a new data set is allocated instead of copied. volume (str, optional) -- Volume where the data set should be. + members_to_restore (list, optional) -- List of members of a PDS/PDSE that were overwritten + and need to be restored. + members_to_delete (list, optional) -- List of members of a PDS/PDSE that need to be erased + because they were newly added. """ volumes = [volume] if volume else None @@ -1581,15 +1592,57 @@ def restore_backup(dest, backup, dest_type, use_backup, volume=None): shutil.rmtree(dest, ignore_errors=True) shutil.copytree(backup, dest) else: - data_set.DataSet.ensure_absent(dest, volumes) - if dest_type in data_set.DataSet.MVS_VSAM: + data_set.DataSet.ensure_absent(dest, volumes) repro_cmd = """ REPRO - INDATASET('{0}') - OUTDATASET('{1}')""".format(backup.upper(), dest.upper()) idcams(repro_cmd, authorized=True) + elif dest_type in data_set.DataSet.MVS_SEQ: + response = datasets._copy(backup, dest) + if response.rc != 0: + raise CopyOperationError( + "An error ocurred while restoring {0} from {1}".format(dest, backup), + response.rc, + response.stdout_response, + response.stderr_response + ) else: - datasets.copy(backup, dest) + # TODO: check that new dest also gets erased when it's newly allocated. 
+ # TODO: check other restore_backup path (when allocation fails) + if not members_to_restore: + members_to_restore = [] + if not members_to_delete: + members_to_delete = [] + + for member in members_to_restore: + response = datasets._copy( + "{0}({1})".format(backup, member), + "{0}({1})".format(dest, member) + ) + + if response.rc != 0: + raise CopyOperationError( + "Error ocurred while restoring {0}({1}) from backup {2}".format( + dest, + member, + backup + ), + response.rc, + response.stdout_response, + response.stderr_response + ) + + for member in members_to_delete: + response = datasets._delete_members("{0}({1})".format(dest, member)) + + if response.rc != 0: + raise CopyOperationError( + "Error while deleting {0}({1}) after copy failure".format(dest, member), + response.rc, + response.stdout_response, + response.stderr_response + ) else: data_set.DataSet.ensure_absent(dest, volumes) @@ -2180,7 +2233,7 @@ def run_module(module, arg_def): emergency_backup = tempfile.mkdtemp() emergency_backup = backup_data(dest, dest_ds_type, emergency_backup, tmphlq) else: - emergency_backup = backup_data(dest, dest_ds_type, None, tmphlq) + emergency_backup = backup_data(dest_name, dest_ds_type, None, tmphlq) # If dest is an empty data set, instead create a data set to # use as a model when restoring. 
else: @@ -2308,7 +2361,14 @@ def run_module(module, arg_def): except CopyOperationError as err: if dest_exists: - restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) + restore_backup( + dest_name, + emergency_backup, + dest_ds_type, + use_backup, + members_to_restore=err.overwritten_members, + members_to_delete=err.new_members + ) raise err finally: if dest_exists: @@ -2504,7 +2564,9 @@ def __init__( stderr=None, stdout_lines=None, stderr_lines=None, - cmd=None + cmd=None, + overwritten_members=None, + new_members=None ): self.json_args = dict( msg=msg, @@ -2515,6 +2577,8 @@ def __init__( stderr_lines=stderr_lines, cmd=cmd, ) + self.overwritten_members = overwritten_members + self.new_members = new_members super().__init__(msg) From c04f506fb97dc7ecc75527d35e3329d32cca8751 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 25 Oct 2022 15:33:57 -0700 Subject: [PATCH 004/413] Fixed cleanup of dest when module fails --- plugins/modules/zos_copy.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9c3ae782d..f89e12d90 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1608,8 +1608,6 @@ def restore_backup( response.stderr_response ) else: - # TODO: check that new dest also gets erased when it's newly allocated. 
- # TODO: check other restore_backup path (when allocation fails) if not members_to_restore: members_to_restore = [] if not members_to_delete: @@ -2253,11 +2251,15 @@ def run_module(module, arg_def): dest_data_set=dest_data_set, volume=volume ) + raise Exception() except Exception as err: if dest_exists: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) erase_backup(emergency_backup, dest_ds_type) - module.fail_json(msg="Unable to allocate destination data set: {0}".format(str(err))) + module.fail_json( + msg="Unable to allocate destination data set: {0}".format(str(err)), + dest_exists=dest_exists + ) # ******************************************************************** # Encoding conversion is only valid if the source is a local file, @@ -2369,6 +2371,7 @@ def run_module(module, arg_def): members_to_restore=err.overwritten_members, members_to_delete=err.new_members ) + err.json_args["dest_exists"] = dest_exists raise err finally: if dest_exists: @@ -2565,6 +2568,7 @@ def __init__( stdout_lines=None, stderr_lines=None, cmd=None, + dest_exists=None, overwritten_members=None, new_members=None ): @@ -2576,6 +2580,7 @@ def __init__( stdout_lines=stdout_lines, stderr_lines=stderr_lines, cmd=cmd, + dest_exists=dest_exists, ) self.overwritten_members = overwritten_members self.new_members = new_members From 9fae4e54caa268c16fe44de8e93fb7f8a26aebfc Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 26 Oct 2022 13:01:11 -0700 Subject: [PATCH 005/413] Removed exception used for debugging --- plugins/modules/zos_copy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index f89e12d90..b316dfa16 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2251,7 +2251,6 @@ def run_module(module, arg_def): dest_data_set=dest_data_set, volume=volume ) - raise Exception() except Exception as err: if dest_exists: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) 
From 626d1f19474c676cf4da47f4b87529378e8b667f Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 26 Oct 2022 13:07:32 -0700 Subject: [PATCH 006/413] Added pytest markers --- tests/pytest.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/pytest.ini b/tests/pytest.ini index fd7be108f..b354e0cf8 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -5,3 +5,6 @@ python_functions = test_* markers = ds: dataset test cases. uss: uss test cases. + seq: sequential data sets test cases. + pdse: partitioned data sets test cases. + vsam: VSAM data sets test cases. From 4cbcc53071a622945103f3d407c52d227a38f4c5 Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Thu, 10 Nov 2022 12:23:41 -0600 Subject: [PATCH 007/413] solved pep8 issue --- plugins/modules/zos_encode.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 0fdbb47e5..a4a92a985 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -514,12 +514,12 @@ def run_module(): result = dict(src=src, dest=dest, changed=changed, backup_name=backup_name) except encode.TaggingError as e: module.fail_json( - msg=e.msg, - rc=e.rc, - stdout=e.stdout, - stderr=e.stderr, - stdout_lines=e.stdout.splitlines(), - stderr_lines=e.stderr.splitlines(), + msg=e.msg, + rc=e.rc, + stdout=e.stdout, + stderr=e.stderr, + stdout_lines=e.stdout.splitlines(), + stderr_lines=e.stderr.splitlines(), ) except Exception as e: module.fail_json(msg=repr(e), **result) From 98315a49d6ec631ba53eb9414087fa67046507a6 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 14 Nov 2022 16:38:01 -0700 Subject: [PATCH 008/413] Added more information to error when restoration fails --- plugins/modules/zos_copy.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index b316dfa16..f984e9195 100644 --- 
a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1613,30 +1613,39 @@ def restore_backup( if not members_to_delete: members_to_delete = [] - for member in members_to_restore: + for i, member in enumerate(members_to_restore): response = datasets._copy( "{0}({1})".format(backup, member), "{0}({1})".format(dest, member) ) if response.rc != 0: + # In case of a failure, we'll assume that all past + # members in the list (with index < i) were restored successfully. raise CopyOperationError( - "Error ocurred while restoring {0}({1}) from backup {2}".format( + "Error ocurred while restoring {0}({1}) from backup {2}.".format( dest, member, backup + ) + " Members restored: {0}. Members that didn't get restored: {1}".format( + members_to_restore[:i], + members_to_restore[i:] ), response.rc, response.stdout_response, response.stderr_response ) - for member in members_to_delete: + for i, member in enumerate(members_to_delete): response = datasets._delete_members("{0}({1})".format(dest, member)) if response.rc != 0: raise CopyOperationError( - "Error while deleting {0}({1}) after copy failure".format(dest, member), + "Error while deleting {0}({1}) after copy failure.".format(dest, member) + + " Members deleted: {0}. 
Members not able to be deleted: {1}".format( + members_to_delete[:i], + members_to_delete[i:] + ), response.rc, response.stdout_response, response.stderr_response From e3a270f1f11b78a319b62e83e6e8a2221de15ca9 Mon Sep 17 00:00:00 2001 From: Demetri Date: Tue, 29 Nov 2022 13:59:07 -0800 Subject: [PATCH 009/413] Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos * Correct typo Signed-off-by: ddimatos * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos Signed-off-by: ddimatos --- Makefile | 283 ++++++++++++++++++---- make.env.encrypt | 449 +++++++++++++++++++++-------------- scripts/mount-shr.sh.encrypt | 71 ++++++ scripts/profile-shr.encrypt | 197 +++++++++++++++ 4 files changed, 767 insertions(+), 233 deletions(-) create mode 100644 scripts/mount-shr.sh.encrypt create mode 100644 scripts/profile-shr.encrypt diff --git a/Makefile b/Makefile index 3a95c29fb..da868e7b9 100644 --- a/Makefile +++ b/Makefile @@ -47,42 +47,133 @@ divider="====================================================================" .PHONY: help Makefile -## Encrypt the `make.env` configuration file as `make.env.encrypt` with user specified password +## Encrypt the configuration files with a `.encrypt` suffix for files +## [make.env, mount-shr.sh, profile-shr] with user specified password. +## If no password is provided, you will be prompted to enter a password for each +## file being encrypted. ## Example: +## $ make encrypt password= ## $ make encrypt ## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. 
encrypt: @# -------------------------------------------------------------------------- - @# Check to see if there is a make.env if not exit before deleting the - @# encrypted make.env.encrypt + @# Check to see if there is an unencrypted file(s) to encrypt, you would not + @# want to delete the encrypted version if the original unecrypted is not + @# present as there would be no recovery process then. @# -------------------------------------------------------------------------- @if test ! -e make.env; then \ - echo "No configuration file 'make.env' found in $(CURR_DIR) "; \ + echo "File 'make.env' could not be found in $(CURR_DIR)"; \ exit 1; \ fi + @if test ! -e scripts/mount-shr.sh; then \ + echo "File 'mount-shr.sh' could not be found in $(CURR_DIR)/scripts. "; \ + exit 1; \ + fi + + @if test ! -e scripts/profile-shr; then \ + echo "File 'profile-shr' could not found in $(CURR_DIR)/scripts. "; \ + exit 1; \ + fi + + @# -------------------------------------------------------------------------- + @# Check to see if there an encrypted version of the file, if so delete it + @# so it can be encrypted. 
+ @# -------------------------------------------------------------------------- + @if test -e make.env.encrypt; then \ - echo "Remvoing file 'make.env.encrypt' found in $(CURR_DIR)."; \ + echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ rm -rf make.env.encrypt; \ fi - @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env + @if test -e scripts/mount-shr.sh.encrypt; then \ + echo "Remvoing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ + rm -rf scripts/mount-shr.sh.encrypt; \ + fi + + @if test -e scripts/profile-shr.encrypt; then \ + echo "Remvoing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ + rm -rf scripts/profile-shr.encrypt; \ + fi + + @# -------------------------------------------------------------------------- + @# Encrypt the files since we have verified the uncrypted versions exist + @# Note: we should move make.env to scripts as well + @# -------------------------------------------------------------------------- + + ifdef password + @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + + @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr -## Decrypt the `make.env.encrypt` configuration file as `make.env` with user specified password + @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + else + @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + + @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt + # @openssl bf -a 
-in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr + + @openssl bf -a -in make.env -out make.env.encrypt + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + endif +## Decrypt all scripts used with this Makefile using the user specified password +## Files include: ["mount-shr.sh", "profile-shr", "make.env"] +## If no password is provided, you will be prompted to enter a password for each +## file being decrypted. ## Example: +## $ make encrypt password= ## $ make decrypt decrypt: @# -------------------------------------------------------------------------- - @# Check configuration exits + @# Check configuration files exit @# -------------------------------------------------------------------------- + @if test ! -e scripts/mount-shr.sh.encrypt; then \ + echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ + exit 1; \ + fi + + @if test ! -e scripts/profile-shr.encrypt; then \ + echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ + exit 1; \ + fi + @if test ! 
-e make.env.encrypt; then \ - echo "No configuration file 'make.env.encrypt' found in $(CURR_DIR) "; \ + echo "File 'make.env.encrypt' not found in $(CURR_DIR)"; \ exit 1; \ fi - @openssl bf -d -a -in make.env.encrypt > make.env - @chmod 700 make.env + @# ------------------------------------------------------------------------- + @# Decrypt configuration files + @# ------------------------------------------------------------------------- + ifdef password + @echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin + @chmod 700 scripts/mount-shr.sh + + @echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin + @chmod 700 scripts/profile-shr + + @echo "${password}" | openssl bf -d -a -in make.env.encrypt -out make.env -pass stdin + @chmod 700 make.env + else + @openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh + @chmod 700 scripts/mount-shr.sh + + @openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr + @chmod 700 scripts/profile-shr + + @openssl bf -d -a -in make.env.encrypt -out make.env + @chmod 700 make.env + endif # ============================================================================== # Set up your venv, currently its hard coded to `venv` and designed to look first @@ -98,34 +189,41 @@ decrypt: ## $ make vsetup req=tests/requirements.txt vsetup: + @# ------------------------------------------------------------------------- + @# Create the virtual environment directory if it does not exist + @# ------------------------------------------------------------------------- @if test ! -d $(VENV); then \ echo $(divider); \ - echo "Creating python virtual environment 'venv'."; \ + echo "Creating python virtual environment directory $(VENV)."; \ echo $(divider); \ $(HOST_PYTHON) -m venv $(VENV); \ else \ echo "Virtual environment already exists, no changes made."; \ fi - @if test ! 
-e $(VENV)/make.env; then \ + @# ------------------------------------------------------------------------- + @# Check if files exist in venv, if they do we should not decrypt/replace + @# them as they could have edits and risk losing them. + @# ------------------------------------------------------------------------- + + @if test ! -e $(VENV)/make.env && \ + test ! -e $(VENV)/mount-shr.sh && \ + test ! -e $(VENV)/profile-shr; then \ echo $(divider); \ - echo "Decrypting configuration file into $(VENV)/make.env."; \ + echo "Decrypting files into $(VENV)."; \ echo $(divider); \ make decrypt; \ mv make.env $(VENV)/; \ + mv scripts/mount-shr.sh $(VENV)/; \ + mv scripts/profile-shr $(VENV)/; \ else \ - echo "Configuration file $(VENV)/make.env already exists, no changes made."; \ - fi - - @if test -e $(VENV)/requirements.txt; then \ - echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ - exit 1; \ + echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ fi ifdef req @if test -f ${req}; then \ echo $(divider); \ - echo "Installing user provided python requirements into 'venv'."; \ + echo "Installing user provided python requirements into $(VENV)."; \ echo $(divider); \ cp ${req} ${VENV}/requirements.txt; \ . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ @@ -133,7 +231,7 @@ vsetup: else @if test ! -e $(VENV)/requirements.txt; then \ echo $(divider); \ - echo "Installing python requirements into 'venv'."; \ + echo "Installing default python requirements into $(VENV)."; \ echo $(divider); \ echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ . 
$(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ @@ -143,7 +241,8 @@ vsetup: endif # ============================================================================== -# Normally you don't need to activate your venv, but should you want to, you can +# You don't need to activate your venv with this Makefile, but should you want +# to, you can with vstart. # ============================================================================== ## Start the venv if you plan to work in a python virtual environment ## Example: @@ -177,6 +276,10 @@ vstop: ## Example: ## $ make build build: + @echo $(divider) + @echo "Building Ansible collection based on local branch and installing." + @echo $(divider) + @. $(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ ansible-galaxy collection build && \ ansible-galaxy collection install -f ibm-ibm_zos_core-* @@ -233,6 +336,7 @@ test: @# -------------------------------------------------------------------------- @# Check configuration was created in venv/config.yml, else error and exit @# -------------------------------------------------------------------------- + @if test ! -e $(VENV)/config.yml; then \ echo "No configuration created in $(VENV)/config.yml "; \ exit 1; \ @@ -347,11 +451,15 @@ install: ## Example: ## $ make version version: + @echo $(divider) + @echo "Obtaining Ansible collection version installed on this controller." 
+ @echo $(divider) + @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; # ============================================================================== -# Check the version of the ibm_zos_core collection installed +# Print the configuration used to connect to the managed node for functional tests # ============================================================================== ## Print the contents of the config file (venv/config.yml) which is used to ## connect to the managed z/OS node to run functional tests on. This will only @@ -368,20 +476,50 @@ printConfig: fi # ============================================================================== -# Check the version of the ibm_zos_core collection installed +# Print the make.env contents # ============================================================================== ## Print the contents of the venv/make.env, this only works if ## you have set up a venv using `make vsetup` because a password is required to ## decrypt and a decrypted copy will be placed in the venv. ## Example: -## $ make printenv -printenv: +## $ make printEnv +printEnv: @if test -e $(VENV)/make.env; then \ cat $(VENV)/make.env; \ else \ echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi +# ============================================================================== +# Print the make.env contents +# ============================================================================== +## Print the contents of the venv/mount-shr.sh, this only works if +## you have set up a venv using `make vsetup` because a password is required to +## decrypt and a decrypted copy will be placed in the venv. 
+## Example: +## $ make printMount +printMount: + @if test -e $(VENV)/mount-shr.sh; then \ + cat $(VENV)/mount-shr.sh; \ + else \ + echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ + fi + +# ============================================================================== +# Print the make.env contents +# ============================================================================== +## Print the contents of the venv/profile-shr, this only works if +## you have set up a venv using `make vsetup` because a password is required to +## decrypt and a decrypted copy will be placed in the venv. +## Example: +## $ make printEnv +printProfile: + @if test -e $(VENV)/profile-shr; then \ + cat $(VENV)/profile-shr; \ + else \ + echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ + fi + # ============================================================================== # Cleanup and teardown based on user selection # ============================================================================== @@ -396,11 +534,6 @@ printenv: ## $ make clean level=all ## $ make clean level=min clean: - @echo $(divider) - @echo "Deleting venv" - @echo $(divider) - @rm -rf $(VENV) - ifdef level ifeq ($(level),all) @echo $(divider) @@ -416,7 +549,11 @@ clean: ifeq ($(level),min) @echo $(divider); @echo "Minimum teardown selected."; + @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; @echo $(divider); + @rm -rf $(VENV)/make.env + @rm -rf $(VENV)/mount-shr.sh + @rm -rf $(VENV)/profile-shr endif @if test -e tests/functional/modules/test_module_security.txt; then \ @@ -426,17 +563,23 @@ clean: mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ fi - @if test -e make.env; then \ - echo $(divider); \ - echo "Encrypting 'make.env' to 'make.env.encrypt'"; \ - echo $(divider); \ - make encrypt; \ - fi + # Unsure really need or even want to do this as part of cleanup 
+ # @if test -e make.env; then \ + # echo $(divider); \ + # echo "Found uncrypted files, encrypting them."; \ + # echo $(divider); \ + # make encrypt; \ + # fi else - @echo "No level has been set for this target, please set a level." + @echo $(divider) + @echo "Default teardown, deleting $(VENV)" + @echo $(divider) + @rm -rf $(VENV) endif -## Copy your ssh key to a `host` or the default which is your username. You must +## Copy your ssh key to a `host` or the default which is your username. If you are +## copying a key to a production server, a second key will be copied used by the +# jenkins node, this minimizes the number of times you must copy a key. You must ## have set up a venv `venv` as that is where the environment script and configurations ## get written to manage this make file. It avoids continued decryption prompts to ## force users to set up the venv via `vsetup` @@ -444,14 +587,49 @@ clean: ## host - choose from a known host or don't set a value for the default operation ## which is to user your username to look up your default system ## Example: -## $ make copyKey host=ec01132a +## $ make copyKey host=ec33012a ## $ make copyKey copyKey: + @echo $(divider) + @echo "Copying SSH keys to the managed node authorized_keys." + @echo $(divider) + ifdef host - ${VENV}/./make.env --cert ${host} + @${VENV}/./make.env --cert ${host} else - $(eval username := $(shell whoami)) - ${VENV}/./make.env --cert ${username} + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --cert ${username} + endif + +## Copy your ssh key to a `host` or the default which is your username. Then +## copy the super share mount script and profile for the mounts, execute the +## mount script and exit, upon rmote ssh, `profile-shr` will be located +## at `/u/${user} where user is defined in the make.env `host_list`. You must +## have set up a venv `venv` as that is where the environment script and configurations +## get written to manage this make file. 
It avoids continued decryption prompts to +## force users to set up the venv via `vsetup` +## Options: +## host - choose from a known host or don't set a value for the default operation +## which is to user your username to look up your default system +## Example: +## $ make mountProfile host=ec33012a +## $ make mountProfile +mountProfile: + ifdef host + @make copyKey host=${host} + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" + else + @make copyKey + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr endif ## Display the z/OS managed nodes available and configured. This will show which @@ -459,20 +637,20 @@ copyKey: ## Example: ## $ make printTargets printTargets: - ${VENV}/./make.env --targets + @${VENV}/./make.env --targets ## Build the changelog, this should be a release activity otherwise the generated ## files should not be checked in. ## Example: -## $ make buildChangelog -buildChangelog: +## $ make buildChglog +buildChglog: @. $(VENV_BIN)/activate && antsibull-changelog release ## Update the documentation for the collection after module doc changes have been ## made. This simply calls the make file in the docs directory, see the make file ## there for additional options. ## Example: -## $ make buildChangelog +## $ make buildDoc buildDoc: @. $(VENV_BIN)/activate && make -C docs clean @. $(VENV_BIN)/activate && make -C docs module-doc @@ -482,11 +660,14 @@ buildDoc: ## Cleanup and remove geneated doc for the collection if its not going to be ## checked in ## Example: -## $ make buildChangelog +## $ make cleanDoc cleanDoc: @. 
$(VENV_BIN)/activate && make -C docs clean + # ============================================================================== -# Cleanup and teardown based on user selection +# Self documenting code that when comments are created as expected, the help +# is auto generated. Supports multiline comments when comments are prefixed with +# 2 pound signs and a space, see examples in this makefile. # ============================================================================== ## Help on how how to use this Makefile, options and examples. help: diff --git a/make.env.encrypt b/make.env.encrypt index 678e78381..ad7ae2396 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,182 +1,267 @@ -U2FsdGVkX1+h3hd2AlxuoIMQpQFeOb4GeCx4Y0NZ+oWHMlO5G3Ytxln6awGw4REt -UiB8NWzVroF24yJoAC6F2Xwz4MzskKFYsHKLU90061OIfu8I3BCuNGSLz9Hsw/cW -yRwkIdd0VjdVV7Wuk60dHFlHFHrY/kjrwv0lHymLla07ymOV9JUgCJutVdSNzWX0 -1CAXSHUVBeOJ7nuKVnknlanAMNsa2/IDyVE5GkrbE2mYD1cCbapIKhpf9yAiqICf -nl/m8XxZHj+MgssC1PC7JFyMc7mHxWwJdruQUXgLHtcHYDHu8Qd8TDztsOFkdsE5 -v1qRdvTifjOOA3zUE29e1WEHlky6ThtzNa9P/1z4pPMS5r2HUOoLONfgvO+0oeP3 -UhmW26BUuOwAxSIf3L+JZ/ZigUt04LJe3ah9PD5KiN/SokE1Rynwej4yxnGRUMeW -OLj/tsQCnxJf7Tx/p3NcqorAjFt1h7wCjxazoGpxiJJWcaFLyH6j/LSBezpU43uJ -9nCiT6zKsTDNpODntZEVWykIri+2xPfOq/nIYag1KyKdPeVw+PX1rvDkWcS4WoQ7 -5yYj0q580zDaHi7qHrLdhd1bbNaiHxJpgPH/q9goMq5Yt+V32BGUAwnkf546EouZ -ywDfoVwr27QkAfw00Mnu/+SUjAUlCupi+GbgaE5pbKcyBYh1ZqLoJRKkrFkNkJCO -vVFAVPbHsPMHdGg/0hgMTE3KIk4tUMZ4VfngdGdgkGE0p8EnVM3pdrGEpTI63wqb -axw2mNmzLwMSYfl0FJVdcUU85K4kSvyitk3pLvdCoLO3cXMmUPObJXEyLSFh2JpG -qEcn5tPf76Jei52LbtFXBHQmr7Usktx2RLC2y06v9OAfu9godOiHTDRFaPfxNBUQ -OmoL6xmNjdGoIg3veYSIPsSCMgid8AF6xUmOFCYJT+NfyUE/klcWKc5ZQY6c5ukL -GjZ5vlDyyB05ADeQiQSJ1stO7kwl8UduJ/gXfeZ+arOSH3SxZg/WjDewoqKAuMvB -lWzwvtqHeYcfFFNi9KpXEQS1r0YAJREWfQQPg0yK6oxpwnPScOqLS4pkr8ThLzZD -/Qu7/R1H0PiY+aAyTJwXmJLgWl8ZeA+FeORD+YRASzlv10bA+k4DN8iasO/MkR5n -mIilzEJyyYMoQZN1GlafsBkaqg/4fNJV0wtnQBswTWWTr6+DYBSt7gcy6Bt5hbDY 
-msdL1Jgcqt9HXBKSK5c0L9r4DtCbKaFvyLIjUFKID4/cDqKFAkxoagwGNIpq242g -qkSHxRwyxc4S5PIQvRxf3/hfQErYS6in7s2yYQaa8R+246CRwwnYBx/sl4PXmlsk -rvVeiSGGsl4ZZsU0s50cLCs7jmU2yqeMT6hS/brQkcD0MwzRXbuqa5kaz2LD6vO6 -t/g/YPkMv6FBtnFodEkJYesE+uhe/vA4a0NrlKqWfbBRQai/HCUwwj+uT4V0YiFM -Ib4ZMJq6m1VEcp/+xU0ABPaW9KD5z0olNMgcA4N75gjCueQbQSLh1N4ESzwO98J5 -9Eb9qmldSoprkOdHxZRhPO1KBGFDnnmiuHB7wamimS0Ts17n0JBYqdlCHlFOX91g -ZwYBZpbBt7bSupgNgxISY/RucEcilJJlziaNbSCcYvZKw7VKAbObyXSfaqPeYbS4 -Xe2wsyJMuhEQBqHGhnS2gWsUZqs1PIV1pgmVWd7VaTgrX2WrspVRZSFlP5HITLnp -/VY2xxBxkjTHlLU4qqF8+c6hrgVmIjmeLqfFHU8M9sKu+ZJCOBbE1bOsTAxEZoLw -qYw1Y2KdP/ZAIbzRpmxhAJmPtkWfGrTLMsq00Hauz1u/e6pDIXvWVUZSvk7dDL4+ -29jSWTXI4vqNv4mc4DiB3hqIYUGiUt16YqEIXiSXL+fVM1gQG3qZkJMbCr/y5G39 -DCXe+yk5i6g7gmpxBRwmOwBLgA+4toiZcls/3VG9VtbUPQ+aXYbMKq3puFeDAbDO -zsC3J8o+n39JL3gi7lpstsk5UTVi625k4Hja052wBe8vwx7rnuPPh7TV4NsJpb0O -AbWf0x66vif2kamtRj+bq1aqc+9CSuCGbWt1P/Vh3FG/wOsUrs/60EpXIERFdPxU -0HBg7Obp3gdCzzQIJBDurK2RTO5om6yht6FOwj/RwnH9FudtcE8UyN+BexHpxcqE -uX/2ON/l3Rrn1jfu8xOFWb8bz3LgQbvgWO645b5x4PTbvgezNjGhoYfkRZ9ouM+a -OI0+JCm/w4vMdyumzvJUhqLFdDfDIRxp+z0Qohcr9BnHsOHK5Fm7C3li5rx975ZB -ETDE4kUPr3NkQmPCzICsKmDzwjFaqddR5n9ORaEdAvTxp8B/qlUaVESKvsEbAguI -fHBixQa9VnK6JMG527LGCqC1bQrh75iEXAVdvDgIrnVwSK5ZmnApzJMs0gI/QIoy -cYE+v5yn8SpfBC+NBbDaiUqHU3lPPNiZinlhzO22MI8/1ivl2c0rjwBO1pMqEyWC -VzIv2pwiTBDy4JOWlIEB2SCV8co1MHOypWypE+ccZUfpPZaoNmin/MzbnSe5/tmH -NdI/BqXCv2DVODHT8otpEozYzKylJ4IKMJbS56aQsC+TNLca3uuUNHqSI5dg3/X9 -/kLXZUtUsHt2kvO9md6HnXGIk+kzpPGX5SCmManNxGBOB+a3mxg+/tx4cUHwnLmr -DT/L5BffrDHGn9cFy1XgVKGheLng1xxqQwa9rV5fadepz2QhAzjQ6xH60ojXHpmr -BIp6BF6irlg1w8OLDv5S0VQrNvqnbVByY4NQNeaGcCGLccPmFSrJOuUdPHvdnAo5 -3g4oMcF35661Rqasqv7D9XXSJ0pyEyYg8wcaF2hMVV5e7WMrAMj74RPO7H4VHsqc -rj2UT2Ww9euknHuKYnFteoBCgjMblZiRD3v6Z8fZoXK8Jq1x9OrlYEHevYD2rkDs -VD+0FlwWibs6z14XMUVlk9yYKUYcJnnYytHr8L6/6THJS+dHc/3X3Km4VqgcZwKu -BVs6orcjS1Gr4aowaT8Z9F+R2f/1ouizBf1x3w8BFwIQ0SmIDAPDL6Sa9GcUYNAF -WKNpFI1IVnX2kvMAzonZYM16yFTAWG7ulZmE1+QWp3kxV7RjVCVb8UXIBbCNq6yA 
-KBUR2sjMGuY8iMBstyesMJLaGS6nqe5kopUxX5otLzuukp2hnyqg6SO50ZagMTqs -faSVEnLRR87KJelbsn9ImN2rlwCEM4djOx7TxFUk5wjzQTaHFg+Ofpp+GILQAQ0C -6Xun97z3IPBAUseyjItg6RNlK/qvlIbwh7a99qTOp5S500r/2Q/MXuIN1xsgJ+Vu -M8dCS9UUDRrlDjX07V4fm78qCe3aCgD4g4r4oGvvrlEl8qeLWhYQQriVCMPpnSFl -17Vb/SAalfP86CaLXpApVrqx7uwcmo3rO6GKTSXdkwQBr6vXRpP2/eknfaiPb1SI -cfOY7HPzjbWYonRZvXlYpE37ibggQlkTPfTQ0D7gKtMyARKjw/TJJnytPFPx9Ilc -5HF9dZPnOPNH5CTpeh2qt1T9piCWF+sYH2W0Tvyy933mfU6g9iOeAxg0IDzJ41C7 -JhodTDjD6Y6a/v6FD1dYQ8V+5ottzkwrWC1I3/anUGvA/jeQe3SDorU0gCf0G079 -MCv3G2eFRz152HKCC7YLpW1wKuaEo5bpyuLRjUF83hNsPKGH5ZwBoWuVnzh0VWnQ -xanccfpJoDRUPkfz1JtIeDZleiIY59EmSZuOvtH+xWche2v/aov+JAODFI8z056c -KYFxLFGBaplYka4DhuYPOjQOyW4uMH+nYQCqF+zErOA5CrNj6rSLQXLN4r5uAqyh -IlpxXcYNKHGGNAS2BrQS136m49QPWC8b+i/zsQKcykRdPaJws11/xQXif+ZMN37g -zEVkOzpv00w+fB+qkCB8IDkjKsR771dYTzZfWE5/z7/D0a2zCC68KBSWWi1bFnG/ -3ELB3LAX6Qe4cLlG0RIA8rE7/Ff3EWd7GBbnQaRqZu2IpZBkl/kAxlaIIkNv70lj -C1jP9k+wPrf7oiRjWWNvWx/E3+S1nZJeU2Xzlq2PG05/2B2PAs3GSe9i3SY/4Ov4 -OO9WR+e2CsWeaQU6b4eQ3IrZEnKiYBcTYwAaXyYq/LKFM9BSu+erGU8V7MbtRjmg -JkjTjbu+1H9SWYwcRAcrTP8gbhiWvpa48Aa2icj9s+5eV8tkhnsqQaRzqYTyX1QF -rNGhq60Oy1jzEjz/jIgyIJJiX6/gbkgIQpawgh75tSpIFDi2ra/AixvFwPHpCCgx -AeHQ/Lzf5l2UzypMkAG7C7PchuFztv9Tb6vHAszyVzOBzS95Lgr66WYQKsfLPQif -GPO8d/XR9aXC/V5uFsPJpPW/gFrFAgAhr8q4MTn4OSL1EWT6VryufQ14OpMVnADa -qG98G11W0SFMNVbuDEd4AtBlbqxRavCejY2AntMDG8RmLOdkpAB8eIOb0OJN/XUY -fm+Ljs+eUiU3TuCMjlUyJMePJJJacb5vO2h70sJikl84O3gAFMzdE3p2aDWtFqX8 -r6K1TYZIWk4afJeIFLn5IftqAf3GJbPvM2qMp8q8hrQhRtc5RUr5Z9xc/WCoP1P5 -GrSBIeS3EEF+KsVgfOLbltfidvI5bmX0nz1OLJv/1Yp9JjDG3LJV+wnKWePNzoDp -eS+XFdvZJY2OWnjhfDA42fVtTdQb+Cfhljdj6XJCbT1Q0KD9o/uN47IK24csXX3v -+xIQRqvslzndrtr7KE3Jwg72exZo497C2T6WpIEw3UCPCiWLfogwKtW+fen05PFp -BkuCAzxZJn1MHDpUimQWoyRWuZuNQ3vztWWnKj9cejYaXIcbHheezb0xqg3gBONQ -gg0DKaSW2ULmIOIOp5DIXnKf9ag3JQpDsKXnjAsKaG0Ii62Y7xtW6vmCkbygjWsG -mIC2ivf1gZfx7+6xdmzXnyMZxeW24/eeANl9QQz9Ai7MmDbJ8yXP/M9FNM2a/DaM -q79MZLZW/t/vNu7yyZYh/6qks94Sk6kmrQQwQLlm3+H9sDDF1MUITtJnf8N2+lx8 
-b3uhKPylpooNGJLLEKidhmCfWOQ2um6rzUVyKFizLkFPsJx3k241sIfHHjTB3RDB -+Lfqt31Q9RHSxwi7sXdGO8lZTWILUYqtrp0qpIbyYbCFj4qKCgH3i5G/rB70FadV -t1Mqu079ujuZVtzKZcF4GRqYYE+Kr0zOuwuhchwNHSAIzJjfZZ0rTZYWzuUS9sMi -LMQlcU1Sa7IcZcgiWXsAzrSoE2th8m+jMuzCWTxUUzwoq/3pVPGik63MlcL0qkz2 -0TJ4TIGMA2L/tLdJoZNXwFABIkDLJqQ0isCTUAWi7b/d4iVXpZsOgmgDs/aBJGxG -If7Jrefz/Ho9ROE0C5T80MgeOitfIWcs5rIZdDpMZWzKITE6GYj6NCyu9SE68LEz -VyUawZXZyGLI8GH0pwfUc+dapsum7BRVPw+MrhaY088QuqYTDYOBNxkoM1ylt6eH -63PpW8RoPIQq+RTwdN0WN10ocEm0B+0KAi8FU/fwAjORdDlvsjBAmyMNyPUVOZSf -CO4VrQbHG8hLhMNtH/AOrVmN5jm+BbrmwF/3oyXW8VkYpsEwwvWob0dufgSAOPJQ -5X5vCjArjtsQXFXi1gjJpYB+Ik43b+mS+iwiwk6OB9Shtyfl8DA8lpMZoQpbUnFb -Cx2YWZHr/oJb+Ab4i1c6QC4OEZfSvG4LABlt0NBdIjQk0pgf4rfMSEOwT/5s5F9Y -9bN10XwrYbZUg1TAtEHylxqfSGuRF0cOd5PcTQtx0e65YlTpYA1C1DS6TrWukk30 -S5Hf+bvsws9HDo2Wj1im3NNcBrX8w2q7SJINVOs8pqX5MfD/b0mQdD1fRQPIfmju -tYPqZmdTpsMNT8j9sJ5Fg0yL1wHchGlaw9D29ZDa604UUH4qMzC1m0Wy6QMdLE2P -1AQwDLizq2gTvS+pNp6sEMbO+hKZD4l33Ps5MrPY8rQrUDdUIwJ9jNrvRu776KG4 -uWqCwEB2LNcHXbWF9kARkfMWbXseRZicm6c6UtXCUvWufepCUXN3h9UM0pghJGVq -J+JH5VIfpcb5kml9zdcYtiHAFzTnJXNBD9yKv8KP4yFwqPoUTsV3jO1+RGsxiZ9t -h620KpFVFi1Ass8DMRJAv088Yxj0NG17mgPLPv/NPo13/dZXV1l6bv6H8jmRDyI4 -vcfkVIRhRyujb9PR0Jd3CrMI695yK+zsxScn9sP4KtgP/dbDONfES4jkE+pou5JY -/+oCqcgVkvJ5o4d7m680bnXBv7/5mxAKwBret6LD5VV0HdUU5/VAfWs3yN0v7hw7 -8oFxQ7TG29oE9IrWhhPivKMvcy+yu49wwzJYKSEnoRZRRmvgy97nZLRKJBDK3hT0 -Y/TMXmVamg9IcrV2447mtqSt6Rkd+kIJVZevUJKOiRtOIF7QmX9ZFkXjYU5VVobG -T45WTrFZSMbQFEt3YZggL5e2fWjWeJQX3blio3daEwZHh0gMx2E+k9j2y9sENP0a -GfBdjff44hnylxceS16WSCwERLAhdgwDH8a/oWqmiswAzeIOs8hzFZDFyqvTb0iZ -mepn/wIdxewODHw2gPh89wi8q80fc1cNg5xjV3HoYwkj1LruKglAAnW92VfgzYNR -nd9GiVwnDHdElQ15grVIiMhhZ70DXzqAXf8jW06wMh1lGqLnkxZXAvEkN6aN6vFQ -TodRB0vA6+4W5sT0cDS46ej6XdWVKv1VoWL8h7ZgFozoG0dfy393fNZRL/QKfnla -YmJV7+lHssQEWrRoWGDom85gmuG3pAMLZsPepAsdOIRXRjxLL2BSPMQ/6OzYLCxW -AnBeXyjmrQRJ16rfo0+nPUYVuJ76JvC+EmLEhx9MjiOyEMjvdSqvfHziUrHviWI1 -uncm+h4CI+ZiP/9V52PHUG4+hb0t5TdBVx2p/YKX483XJrfGdr2v8tzTOEl2N+dV 
-bW7KHTghkhAajBjZ8zKqJTfkq7r1o7Z75y2yPCNprouWf9R5HtjGldmjM1F2P+eZ -Cq4o53ijn9SXmcBIcSFV5XSUe0MNrzDI0pfAhdQuripiT1Oh1XvRgdgghRD/3fuE -kj6pxJVRvVBwJNKMVLiVDZq3wXQZwex+BCiC+83I+uMBTVIG9p+Q5NEE5Zx0YO+L -xhBYjQ6YQ0bBaXrAdxBiy+m4sfCpfzAqujitHJxRYzQ+AeolKYRTYS/ReQk8vWqt -EmJPuKuHEsnWA5dqFMdTazydj7we2OhvspRPkSeMmJ5MqrvM1i379e56u1U/tF1l -GnkkPNPLDHlBkN9O5bAX2DeafbVwpqdgO+M0ea3eeOwEg4vLrQgyJfRoGWZZhjQK -vGoGTjFHmAnrYQqGu0ZAeXvxJB8gajTrxKZzLxVikzqsJyTlYRyQfJZS59jr4MWN -IKjGzO2ggWdTblamM7io6fFSoXnCN0651Kd2vfhy6/ak5q8FeZHN8zHmolzfYjWt -sGUq+uGiFrRjHOUEu7cmwKYusvKZM5Mu3B/aIgH7lg+N0wR7YSZkBtoC9CHKNoVA -GMswVd7d8rL31/AQLwSuzs4vlqEh7OL22XR1ZRHzPBdldMA8wINZn3Ym7Z79mmwP -xB7H6JFd36NO+AR8RzKZSuB2cjJjE5JYC5dBhFec5LWqeU+x7/tgtLAfbdCku5ve -y2RFMVaxQAcVHs5Eb1Z6nlqaTSCJT8iDOAPLKJ7/UqAIImD5YG5sWOcNwvW5MhaU -cXJan7uG86WOtvJeWScNRKqg1JVOCXeCgGWisI3MRsu2UUmIR97sTLHDfUqLRNa5 -qVrVAAU1LaR2AGeA/WMpQy+0u5/YxaPf3q2wPnsqYidSD+6SeBKQXDsYG2/gDath -RUEJeZGNiqMT9u/qIGbSKMKeQYb0bDPYOqQYTKWZWmDE+2i4XFZvxOTUeFUkQpmf -tjHBc4vANCWVqlkW/cYLxpqbVtlxUX26Z8yIkJq5tqm5dgF/O8urtQd2lQ7+pG3V -CtAAmE7pU9NqQ4g8SB7+gKDJF4d1Lzv4k8i4ibZY29GxgR/g4l8Li6ceT5oWIOgn -sPtfzzYZYVOLaYhoyv8Q8Nj+S1cEk7rhsgV5nnBRi4j1b42TAKkKQmTChHymK4IG -wq/Bik4DYNU+vBWihleO5yXGQpLFoPVbSk91dxxkgk2Hj9rpEWTxjmSRFjKCr8Xm -Lo2cgfKeCukiNk18NNig5sjqmkYMfTjqVlv3YBcX6BQVChhXf+EzvhvaASV3+aiQ -ztS6SUMROZznqxwZvANK2MhBN9pJOZthmzYQ22ifmDaxV3FlTODB7sYtOdyS31Nh -447MPBwlZl5aj7xRC2Z66bABMiyVmFTBmuZcqV4qS8fYUmpKryf3On7vyGIiJch4 -PwfqN4rh0J1rGqDUmqZyf7fsPU947G8FgkkT4QlGLoYzvbNk3aVpRIH3cMyWdEGc -0qDbMKXrcDbuPRLe2IG+FvYw1I9EKRnls04oltWMZ127k6Dxz9oSRrpWrR0TwwKo -1cJRRxDTo7zdsn+2Cq5gkuDqsA5UaO8ejRicgdvntUkxeuxToi2Y/Rv/GwTXNehj -6SWqA63sJmwYZEqMyTquy2sxS2Bb6EtBUfHlmoaEKr4jFRih6IaGcY+M4n/tysSC -8KpmDaT+0zgSRKzgksl3cVQxjHbt+vsSXHzl7D+LAahOVRXzgftnryTM47dGO+lg -sCuiEqhLQw2mrUeU9RSppQzVRnOz+4JD2YJr9az6a9L4jHsgG1KymuExePkTWB4r -CUfuAEiRAgkp3i2WwsZwXYt3h9KRAI88YdywxUOmuN6MsFkrHVRTajwCajJ95/62 -dQx47VnnHkPZD5P9a2n9ilUx12bSE7JRvehtfWP9utvokYMRc8bOuXFgLjTJD0GD 
-UzID74gJx7cayX2+afZXNTMeMIK2aESnmVosHZgRNYEbul7maRYEMl3V8I8wUcEA -QcRaW7YV4uz5tOZBTLipa17/kD6E4hcLoO3xcq+BobxnAFE4cK5VV9mw/1aw80W5 -dk32iuYZhTfd5WGPOCXGYEon8NT+OvK9SMzaL/O/Fwy1N71y/iUZdYFoKaiTGhJJ -ThA0+gXQTxRqu4hwlC9oM12DHuigqarDanGmRyqyGmzbffg2zPGEXpPNV3c+8kmJ -/RctCx5QozpeoUXKlK/OsQBPzSTO32pK9LxyI2AXc9ofIhdCLSA3OrBJOOHyuBwf -SqW9VTCLZ4W2O9LfL62qP+CjxyGi+s0yqOY4Gl7Oq7R2dZXfmvJHQptVL6tYWZ05 -EM8xmJaXkFjgo05JPYAzUsm3f2Sr0UinfZwHnfQcbp1A+fJa15FhW9zbD9mN/y/x -u9jPzS31+4D5KjxdM6zg/NOzkzv5hV62/xVTZwaI8ZZ0MPAOuK+q7n9dO8klKMfo -U23UE9J3sFQxaC0qYD8T4Z/xV/Sv7HsLrQjwB+uf7HxYb/ZvL5UjikWfmJIlIatA -MR6gqDZOCFDipfhzMKAD5Q9UOmNYMxVF9pMHwddJJvqKkajHYwvnPa6nm201GpIn -5K9a4WyCuO7jCpI/HaTl+gZszjSHSXv95cX+LGl5LZM2KLm/D6v/3tfpXBYHnftk -7NQ2DTXI+qUeCR653JbN3E0IhzQWqdGo6Az5D8AhUDp17NqW3q8LYOBwQUTptVOZ -BVWFm0k/myp8hbPgo/ge9OoxWbMof1Vmnd0lCuFvx+VRqeNWlcdEpt1IgzHJCMuL -ubImNmxRpCgqNtS66aJafe5PabEV+35x+BemTvRnGJpIsBxoetNs+uRsLqwW4pUf -uq5K7xhN8VRol4P3mY0vrgNh8gp2HfxuY3mnVP70AdAA0mRlSTdFeUvSox19h5Eh -HsvvQGBb/o2n6HyLR30X7mkR3CvIqTQnltYRAyusprf2RRQncpj/GY87x+PkV+bI -K/H6+E0XkHoIhz82MytnwqicP9B3g95dCGK5TPPj74Cn8b/p+CdaxWTQTVWPgFjN -qWswrgpVAWjYduFCaZRLZvxey61+mcfF60epO31coqzvikONn5mdyBEZC4ClK3X+ -LViuLNl9xsFvl5cAT1WngWawX0rMrtdgHia5FxoQW2sopcZH8rSPJ3hjfTy1+Aqz -Z1gPveuoMQbmjlMxoPz2cqgRiyXBBw02Z2TcEl1JQ2wsSM4goUTSehWEAtMVdXKw -V9fsk6M3Us9+hQ3f6Ma9pIm5FKiO1lTXvxO64m0wVlbalek6Ebt4YcInHvyj4BQI -Tismemd3E2hHXfMYcVJr7Tz8YkrpHZ4U0MMiYd5gY0m9eAaFjAP6KQuJBsRHqSya -f8yoPs+55zcFovtEMn1MfNmcrrejFCGXqCu7pk9q8n9RDmEhM+m65IBjwTJY4iB0 -ZZ+2TirDDg1LcKry2CkLgOQ+6J7KVyYAXw9Mj6i2RyQGTUNa+WOlQLWSWRaWTThO -o6GpZ7J6voetISjVO7IpMNDC2et9/ty3x0/ZClgh8XHnDgwPYk9sPwxqJkB3LMM1 -W6GFsOkUTAOypD6Zf4xdKYS0Sh6Sy9PWctBUY0pqver3biKk35A96LKiVBel/4LU -R65k3zD35NU5tYPAUU/ODrMTCxdMvwT3bYsh+UQ5S86UBak0lJ2wIk2QBBtPJRqE -sDpawEukNOVLdvVloKHNWud9t8xJT1eO3xoQworqMzH77/fYXR2Mu2Qd/X/ZEGiz -6j5WkTMvWeJpr8B4HzqWKuTJHgoB2TH7xdhBZ+XJMA/VUQo6GDmt9MKRdKZn+04S -XzTVe7NA4UHM4U9P4d3/KnzDntNLb4Dni/G5PWZQQDIhnjzRCQqhFCNa7KZxLrNU 
-w0awDhq5+gbCIbYyncK/5+o2zk6GhfQAaD1i+MllKLuQ7KlmKrcM/tDPGu9nBUfS -gVh2r48B07dsY9v22QNm9O8GNeyXGe/Xk82RChlbL5eDt1ob7Vi0RfP93MVWus/5 -B6bDXSZ1eY4L+AI5yPETNgHKX9DIzPkkeA+tosRFZ9H+7CG0oCdf29hdHU58rPvA -aXqdtwqkM2CTfdYwMvXMnoGI6DnIJhf8nrfRU7ayNR/FFR1wUu4d0zy32ojqWLzv -rQhjZfC/Vl7UgZXu+KzqdNRHdk3LswN3yZPRRQyDuVoD/KPkYyYBq+9AhmtaQBOL -+Jd0Vyj6TqhtE0oq+D+4L+AechA2qjEC7OnhjfqCvxACt/4WQ4hTtXrMt4M1/x00 -D66Q+5KDcZTTN6xNfEw+N4VkJx1qwsRqVRCmhox5nKGblPR/E8tRAgEtUxq4LoYo -/9PBuwEC+FUrFELr73vj2n+1+1Aq4Y/Va+F2m+l6NWg3+zAmRttdgnNafHM94yoE -vGl8e8zN++2P6X0Tl5HMDojncAQtJvLbBC24yYMu68nNMLLqFlr9UkynbyHV7Dq2 -+S9Fy1VL2kWcVtupiQhcD4gysK8T01AJPZDlu2ikgin6WlNSg90SLBqdw4bEZbBB -OCv8xtSVgOqa1kJoR4o1uwPVlVtqkD5KiIOniEdCbNyhSQzr8B+NBIZFglImDVib -w6AyIUy5mVSl1cH6NZXBBmV+QSltvLPpWoFOTA6Xx28bVBfKHp6gqgrcJzk6URjX -k0+1OAyB/4HKdh/NOfkc8fML7+e4vUJ79G5nHYpecow= +U2FsdGVkX18+W4d9i/Pv7SPHW1HcDa1oC4SgUC3uWVpK7kaXkcdk8lV8odYGtLxf +QoKF/usOm0CkUndeJRTcHdPL4wXDkOp6edyjXo7+7DjopKio+g6YH6q/HwTeOIqR +EUajgflZHSDQ3BwBFNnamxthpBUKmLuOqSd/OMD6FeWsKMW7Tm5lJNBH0sfYA7Nr +sNcWUHFgGJeurhKK3KIHqeMo+Yi2NbYXJ/s3FQGdgUyE+C/+DiRiEUsF3Ej8Vz9N +2fQOy9Z7LgPx6pVVzBzxJxPWAnz6W20rKIDD7Cu75Vh5zJ+TbTNNIFbvFg6caCUS +1/ueAvDy0NSJJle3SyXgO5ApV+JNbwRPTkE2zcoFP5EyLG59t/Tks7U/OxjN8vwV +Tl8FJcfc5tLAQnpm0hxVDph2++o0evh81vXQoUH3atNzPaldrCIrt7x4gzUa579j +cfTMiivakat5dgT9ZE30ogjk0Xvk/phox2fkp3t3GFPrk/H1yd9uC9mUjHww+q+C +Qm1H/5ovWpNNp/qX5npZQOJmb543XUqf6Y6gLrfm4rdL+2GYdrR0fGzaapSx+LZd +owZx19Vnj4NRfk8nHRF8NupffBWBQeDhZcXoFESL5D/pvSoChLUwISv/fw9o6W3w +L4LASrZAJ5ltcG0wdDy+D7si1lDOXREMv3j7tymG2Xtuc7MnVjidFgy4bHz4xLAH +zWBDFXVMIq+yRhL5bYAm8ffVvqWBXQz3Qq3LhKVf4x3ET/bl3jNjYVLMdGBskakI +tIDliOgssk1lAKiSzAF3J6tFN30yEHMfeTaHFl+LUGIfcNMtiVspTy0eRsgRaYv/ +Yr7kktcjhDZlv1Dgcd2Bk7VZwC3wV9DL6uMVwLTNwixsmS+FdVVmoo39j+NMMGDe +wD2PcyQdqHmmnGzhT4sIiezFdatMtWtntpguPjst1imr+58Ujd3D2w+LzE5ajx61 +/9+8ewlb1c3ScuzWS/9bFztK7jjL7ar6aI2ce+eTn70OnurJP3Dp9VBHQlz7RsaH +cqK2dyFcfI+Z3UOoTe504i33Tw6jBvdLl+o2VGPJa3CXhvDjPfCo3kdDMl6g+CLh 
+Bi1FKuAPkK0y5AZdC6ZbVIpv9EpPovkFrkxngNVmDbrAELMtNVIBxLIm89SghSPT +w+oBNBNvlzdkIARsa0JoYjst3YGDBo6NkLoZOcrUO6Ct8OUtXrXC14FBLpct0yUa +5FDX/iPrhKqsl4HSJ+/FuzMfX/TKGaDLcfL5+x1o5r1liFbM/VruPu+4AsFfiSv+ +Rfrb7YCTgyxS7/sTl4iVvVC0jmpA37mgO+g2hEdCv1n49aNQ93jp/rf26A/fGAdH +P+eGXOOJJyRNsr1knSkmpdOpIlo5L0np7AUpAjI7pC3iSDGBu8JXoK3ciuIY13NV +tvitHB7rsX61GHr23ph360Pqkb2PGkkBILUNF8ZOFfYoLskUXAQQitZ7MkGyDXJM +e2tBJ84BBl+xk9SXzOf+RR7oYL/caWZksUD44bn7o6O5rKlyHeaifb4pRLjyZawg +EYvslYtAgsSFEFFDnAObkA4VlpVpfWzPlK7XTUsiMEpLMPv64adYGsvT9JXlPTKw +v1JOWdVz5hqSAqWMVmXKDvwwVnetVhLsSS4bGBDaIKXFgvCiBbNraURnN6/nR6Ve +bIJvsAzcTJ/ln7u+D8LRiD3gjbWLT10wufofi50tyLMI7/0d7dUrtKgDw1XC7+/x +pNnsKRreRCVqQ/dt+fXMcFBhYJ/1e/SIxN7F4bYJKPfs7yuSqdmnO5mdj/hBZMW0 +YoDVjsBTm8EHkGWf2CoAWYJlusIuiF0YjkLbBxFTdlyZS0EMm2HFrioBYSHspWH/ +sA6x/1pxOoIxHO0EEJdaoU9syOwnUS5D3pnfZyVtxIm1gquAx3BXPIyuXXgBFhdZ +GSnOLzLlQMmCRKdlP284GIRg4rJ3s2kAmGS1L0JXhDzNupysiO+X++ztyXPtYjnf +Xmxu2KqDN1a7JS2bKRGwruPdXqtxmLik+alPQ5UDpuG2QR6FFuW4nOfC3R8gNolh +ZsMnr6dtX2vsM5nfb4p5DpE3ZOIMz/PS30pXC3179pw+NYpTuFqoZMyINAVddqWK +VAVkvH49dL++OFERPXOTp6Wa1t5Mo1sHySMx2kftWv5EHWzw2Zs2c5W9yfT1hsLm +uvpdu38IM3niYKueLpGoavUNYDmNivo6C1eca8I9HxZ8TDhk/jqQD0j9lkTg0Wp6 +CG1LNc3QEerwX8kE9yrNefq0d+1MCTsZ3Hb79pLpVYHZBSemGz8BP4OzgA5rrq97 +t5RUPz7E9vkzGLCdb4p9ln/JYY4M2sXpK5lal/L1enFfPb+2Mk5sRmHEwTnO+VSv +PTSf3DH1lIfWnbU6WeQZSohHQFasqsKRIUYWUaucQVEGMEm56bnsrciumwLJhkNd +JSGtWv+zh8H0Qrg61ehxBUM32t923ZX+TKLkO1dHoofvVpmKQYYMCtjQfuqqlIVF +Uuc5RQzJgs/cWYbBseZMMKuGC7KCwlqwPnwHqoT31LTvAtbhxDHAXKukpzfpPaBu +BWKuZwUCuo+KzaInaaABOelUjdZQJt6zVN5/OjeKG/fEqJgAEvSLjOLzAmDtA3tx +6nNTgfewTqUaL3iWhsz2C0Kkg2pHK75djwA19sSv5M6ehe8odlTo36H+JE7GAyH0 +W7MGXLgjTTnfXk7eZcdk9CaLvl3zZmzMs+9MD8rYI9RGWVb43L3l/56QRB1WL6rP +k9ntQFX4lgnDBukyYBu3H+8byZSwoSspDiybfSBVmU2F7uadTHOQAtP4aMp9y6le +g6Eyc4n7nGRaG8kOJ/pYKcXMDqACT6N6fJaoUx4v+U/6/dyFGVe4cVDVlnGW65Yc +NQT1GrsxzUmKgSnXJyS0YQ/zWrOzt4DUybQ3Gzc0ey1yX79UVD5D6IuvpSbfzFKF +P+8OAtICNXOsPMvYJDDgACnOhs7d9DKeo9gV/ALY+i7CmlcrESG9a4BbIjMhpU8A 
+HfNP9KHatNzc2ja7RqHvXZL5bqLU8PmUgU79SY0t2eZaTB6ZYd2/F0iVBtPJ40S8 +O6aFtk1hKun1+PD5GBMkhCvGAl+OV3vWA8TJwfr6K3KZgMoDHiZvDe7WTuE9OdGQ +jtkwbpDYmmQilANNli5xiTQdK/B9SDYOTyhYzdNmRg2EF1BqPydl8cMDonGAz5GD +/oDr42nc+KfcIUvWFAC2WqgYn+CABXINY4CuoL1T9a3IDkgyP2OvA+Il7st34GH4 +aBRhYHPU6+d1HtmlWlZ1vdMJ0a9r6dV/0od3lfnn3JjA2saW/3aO3KRLEhdSvkBS +GQAnRaHjOka1maSxungIdLFAUddk++fPPLbnHVV7xSK63YueI0EVfxw42vP9gLxY +9K7QerZyq67vWs/qY+C1P8BylSv6aZgloRX0Kk+X9bV09t5Mo2c7Tr69Yv6hVJti +5bIdc8CiTt6nBIKaz7RYa743r/fAggEIpFAU6uUulnl7iqMOTHsNTcNTZyz1dbbf +NcC0F4TuLFNAaToLDjg3oQRni8LXG8wDMUcnC8ddbKtLTE2mVNg3QOnshZ37BSwy +JbDsJ0wNcS32XhEX7N7fl6vH8jRZJLsEUwyZWev5vB/BcxIN5iAV7Z2WWyWlB1rn +Vg4JqQaXc8jqhRWW3RRfkvblaDTmKgAPsthiJTTc42B8p8IXkZtGw1Io8cEd4w+7 +GirHyzU66+c6mz6/LRIc93OvKEGJDSbhhFsMQsNDwUPddqNKuUaMECIUbtiy3BqM +Z30Ilaqt4hGRXCmo164F/MEQKfZBUNz2JOCEOfsD6fhAmW+g5mTk0lo/79KI8zBk +ny17zbdUBdezdTrN6mOnal5nNeXvLxB+6xypEvz9sLEO8VgO7WxvR7AalBydhNNx +xzelrTnHqm2esJp6/MAg8zT8UG6h115etI3wP/8ptwLD2fPLDlmiufWjJeaxZZXM +b9tY36ehaWgyk+9M1y+5RbKOCW89xlXlY1FOuRUGQbPN/44sZiJV2kJQ27FQPyvN +ykGkVfibL2WBSSzP6UhlL43miPglpMA6CG8ygrez54q3J5p+cFcCwzHbxrv0o30P +2PLzbKlmctsvXaRCd81/tmuACJOUIduzc/NqYgaPP49sovre4yjL/TVD5RuGEZeI +JFWvc3/xnO/srDAqcQ13lCh6zqXtCpllytHMLm6Qrdd12igwd3E92pwJxIrSOmBy +TkIvebk6cbywRpeetgnA6JDeNnWtE+uIMdvvF5t90RXikKdalJK/hY7oh0byXTCu +U1EDAcaHMqyGJaSq7mZdJ6lt02QUDmdHFfUVSRuu1hIxCff7btNY0Ug7hDmswBZK +i8ukwQv3nELoizwhM3ZYIh2BAVHoncNLcorCtz882uJXtZbMSC7RiZ3CgnCjiarz +xjDibUjb68AHOkS8HguEtCB7SAeUFu/wB+5GrQIg1zVquAieHqUzXfWelwFZhO+M +5G19qomUilZ/3NtRvzxpt49oiqmyshyjt/AYLQDIZmMTFxTR/8ydm0E5Bn39JSsa +lXCmK0Lo11i1lhV/lDkVC2bNXo1KQlm4v6A3JTC0Wo0iJD0gnIyZ7YLz340FZV04 +s8rSOQBRuN1AOi0SbmGMU30y8fuWpWF2gqenIeCDUL7KrUDr+/jTRYl3CgogG/wc +rrdSyH3whxj2/fTPvOsy1cHtiy8u5ffv/cMFJuFT3SVPjJRgjnl3zchMMLEpjYkJ +AufAQ5gLmM8s9GGlFOPQs9ha3/j4lepahkAXNkqAAetBpMst7HRfxcW8S2NT+1Fs +rmFSGvpbzMkMkd6aFKUhCirKwJRiYNB+kZBInDUJJSlVBpBwgbGo5jb1oIPyZsY0 +f/laKLgtyP6AkLBPyY3lgTmJBAfC2hEQOLNJlMO//soYPkV64pvb8h9lQXV3Bn8Z 
+rKkMYk3SiXEmGzoFb5hRXqHJagXf/QfAbP1mhCtl5SMJ+bFxR5OdKNI5Sovl0q1p +NEdLJEElZ3jnIjHAoj4xRmFdwKa3ajFTmtlPw8v71M9z0rKnmtMynvVRfSb5M8xF +q98EsPzl2lCtGnpzchLNKEyuwtiqDJOlRE7SC1ls25BAJhu/LiEfcOsE53bowkp/ +wa9Y6A5HLQ+0+/sBfJy3fB+ufapFKGD9RKesSxJCNUC31v0vaCQHUmLyprr0Ftvd +E3p7drG0Vo1XbfZuFWnFEtAjPSHfpnXOLWjz27IgjlsDt+JxX+IIe/XRp+Iyl+SJ +I51Azn2KhFNnJ15RXSMU+kVAYc+5AUMlcsKgRMrF3CI5QMEekCByodV8RYbLzOjJ +YV5CptYwGdp8/x7zgBDmCOz88T+Zkr5S1iHdeh0PEUMEHOtUscpB/yuC+k77QrBH +YRdWhrM9vLwV8AvyLA0W5Dx+hHx+RDJXCN3RASS/t0H1bgiUzHTB9gg6YwuNQK8S +bY0bIqM6KlJ2CxN22KBWcN6eJM0E5ljmtZo2ZD09SQ4cYnWjp3ckrY6lqpfiIrSB +vIJVCSV0mPUgDlNdid0aOhl33DiJPT+6vmaXxwvADHym/ag4i0T3fnG1SkUWZaQj +auLTkNodiNNOaFYZqz3frCdFbtkCQEY8Nbgt75WSHRO/tI6/VuV8QPZTHzevlyQl +Lq+Smlt0a+JGLKpkViiQwv4xctaywcl+juTsQSfPih8owVCjzMMbOdl6mo74lSHT +cmerKgp5gtDac500g9PgB4hCWYNsdJawFwWnCa2MrSwWa5NOarr0KFSULb0BmBhv +b7e0Dxfc+b+2qRZBq8Pr61j0dt+x4MMMPjXS3HfvWVpqK8MzRc9fcGKOelVNLuAy +Y7eVkuPrygQUqQX6tVYBrBREnbeU5+xSrxvPbW9whAN6kvpM1Z9dpLKnY0XJ6Bu4 +mm9z3PV9ZlZWCjozxpDpc3Cvpod9RE60E6KhshpXPzbam5TiFT+YCBOTA5AvIzip +QRGYcCzvMgjTXajPdm7pHgAvC2PScuj84SvdggxCp37RAwkQ0eOIz9WedeAclkY3 +mC1vt4Px6Xg07i0tIRtzpi9oQM2bTnHcPU846eNh86dEhFu2WwHdCI+8LK8FrbAz +eeq1fIFO0UyAXSGXm0kgkOEo+LUPzpqSwXyvM63uMww7oPUhaNwjH6HVOKKdlX2p +KVm663fahVOLUJ2Oc/ehjj7J18Q9lgT/5S2Z8JMom9WaDXNx4e0OAT95flSbF4IT +lSKRkscQ1G8mnwNbS12r+RH490RQo0aJ/NzLEx4W4rJe49G+jDb2obpGKQ7/ZrWa +b0zu0f/sFpO+xkvRgJ9V80kCVzzSNc8UyktkUp0ZeaNvxVMG++R30bC0//M3RFkR +ky2B7Hw2TRifYV9Uxe46T6Ik/TvtGY8mn0NX1PsK02SG+6GqAlOAqWpbPUaKMMxg +3A95jU+bSiEeBTEeb4/Ydm6tXqUeW5IkT4RMvr+sleMOshmzcY6aINkRN4mApYpG +a4HEqU3+uaClPEyyXEkV00Qb3BK+jVOEX/9qBP/f4zovhaBnMLupBnzhorgyMQWM +vxNcyENv5B8yidXf/qUGDb3cAgOVKaSEE9knl56xSmJ8tH7GPFXHYRhnQYtnQmsa +r099VD4Jgg5075n+/y+rFnt4fHFYh1eF/qg5PkZcSpkc5d8a6fePgad25TjPPbtv +q0QLgzJOPeBJZ18emPQTVlh8SwJ2mcaF3RlqCZ3QxFPPpX9zzyhOSiTIGWoMrT6B +HHNO4oSXFYK9JhQgqRjMfdyyYJAb/ITxBX/juzyqPFiCPvLqLn31XfmvN+L3/iSq +xPB+p6oZwsGxrcZnkemkAOmECo93bu3t7VomHMLaj2RGTtVzB9slgjaTorNm3w0N 
++N6P/kgDknJTH6x7UtR0jiqcE8xIi00B2fOwWfmSYnbULQLboqmLH4PDcE6Fev4P +noaWOrDtz197g3D3Hwa8XuMI6ZhKE3HJa0F+B6ctPlS3uhyZWZJwNjm7BYU4cCAR +lwRrv82AG8Tsmv/3kS0jpJfWxiWnupsHB+mLUIVBvP3kNprafQUWrir2t7B5al42 +vymL9UYcWNIJkYgtaPmWgYyKGXzbriJi8RiCKjCTkP7jC1DdENXLAeWtXXqTmMyY +uqnBHv3TZd3ytZT8sUpoGI5JLQQZO6JwyRUSi8J3qp158/x6C7w4wP8IE9LWeHaV +JbhnR5Mhw7kRarAbd//I1JMrZTLOZJI1dkYROyo/LA62aUiKjALlP7EGgdbIoYFJ ++aQHCn1j6Bl54R2J6dcGe5hxIlfx2gnihCH5LZUNPlZojtvxOlcsTT0gK/jRmtm4 +RM7/0maZO1rMiXOjwSPax3BYpw4mvqk9rcY+pDatkHzznTKemvt29fY7EB+MnOSp +lFTzs6oUsyvEDc0f2RRinoYG8IkwiXs1ZJ+OR+auVVFtdPGYWskmeYiIUwYeVtnA +MsTd0jKLc745zpFlT7njRTNobVFBsKfX+zUVStFiR3xMP0xrjbweCE6yZl7FqKOh +JsAUDsjs2oq1OgvHB97y/egW+wMNCDJYuUL6AT3/HMIw8BiZgcZ/yxkTedNxavIu +U0ejzcyBQErrVVYqwQiHVI/EuUcV315ZdAVHrWH1CHoQcnTnrXSoyNm2IFLCtk+T +nP9UT8pypoiAralBGAu/OQ7TKtHjCSJvdmEf55myHS8wwgaFQqWrR1PJjc6OJrGb +8pydkmKHshrsuMk6Ww1HJgjVjA4qSfk8CANNQgV53JaXIc39uxfuzTkxyVyFp/xU +sHWKq5i2mgpIUYnbIdrutbyq0pfBcNUqswEQWa6USsN05fOfLfuJXQTaaPfaDJQF +RUrRyJ1eEYV3Zt4ulhMUlZzYZU9uU3X2Suk1vsUgxCSOJzKKD9Eo1NSSr+gCzdqD +PB8UiKyuc/Q0WQwcWTH0Y6iuylnVQywOvXDWXOiNaznMMnPE5b1Yp4t6+3BlOUr6 +5GombF8w2C4rYl6EiceWXvZ0PHSpxacLG8xgA0R4Cm1gAStlwbgX7+CrFRy6KV/L +CkuD12XZo6iW8HduVA7tk2lTB9RFMDEfomiwqXWOGkJgJLanjz6RNmV+gZp63pB3 +tNDQ5AiIDoGFD/It5E2lhGdTubXXi3bmnUKfcU9zUN5zIIZ8MJTC6KWHeL8JH1Uk +KGhzotqeFH46uzuWOU6LqKoGProk6QxEgewkvgY0wUCFxfc4iSN7hQSFQ5UCBSsk +BbMqFxawc1Q6U9RMGfi1tDXURmsyJW2az2fkdkTKS/uWkUGKgVabbs4RA+lYv9Mi +Q2sfhl8lHKtMMFoBM6H1Ias/Hr+WpCiy+wqzsALdhxfNk/4GMu4g3LO7y5IR8cY0 +2K40cNXC3LA3qXRPvZ3yd0OwXpOpMtryGQqhNtWm2BPe+2o8KeuohXPI7MCeeKiv +CPmbrrhQJJtE/wF8JqqVv5pR9bpISaUq+QxlCA6sRS7VoOm8oGZ4R9AO0Yk+P/cC +1dJZYc/VLqggIOu9DBXb6SMm22ArHCQ+OtryU0pyEpwQcbG/JvhGvsGz+ztY2PZo +PTRaWh+Wj2OJJl0jpBYjqAo7Uy9QSx4oX6q8R4mqXTSeKARLOSMw6ccdRCG2QdCW +FlmmeRq7YWtCCOLXbKJVE2AlaPBKG48E8DmP2ndADOlIYfWnFrJW5yJ2dP6zXQgT +dfOLBiJshxHzECBUvyX7c824atznnc+jiGQL4zThymOCYK4XXT7wDdc0PgIly6hx +R1N8VAH2dor1sSTXexR786sL9BEgr5QLcc11xp16fQu9+HSafsEn+Qupw5nekA+h 
+rv1dUfuuWQ5cjlfPk0xmw1pQctYfqoVw2/4P6nrzxKj7uZLfXqlTlmUCLr+YEbxI +2iotSilWBmyIHV0MA89yWBSM1cLxiPEuACtLuqeh1r2sya02rCdn27ZJcB7YdcPF +fskVugcZRwQvwfQPSqzKExSqhp/frIgK1Nq7dThGQjQ1KhNoZP+EZRrobPKI60DY +ol5Ihr/FhKSQfhODcX7d+yYXD5V8rGtkBfr78QFsUny2zR9GpSPQIwPInFzO8Vhd +GmDDfGjHDXYektRcuBe9DPRKVUjrpgsOXX/kzju3OEjb+ZfEL9eyDMnBTL1ELCzP +i6QnRRB10igRgkVdK6CVPY8fFOyR6FHI2aIriALZClm6OlMrEcCPxFfolGLrlIR4 +S0nYhAAd2wbT1mEot+LJ5MaKhXgFvvXCW3j1AU0cP7mebvW/LqN+f7VhB/4M9bj3 +Gsxj3CA3UwZMA+/Ufo3NzhHIIgBywDK38An9uqtQcNWwLxaax5FzmKAGBFT9XfnY +zAazI19eqH7FezzF7/nkNA395PS/+Y/gnlavEjst2zma2Wa603vgx72DdfF67y4v +4tI69ahFtNdVoHlFXfFMwMEwgn9AaK1mfiLwE1qz+CaGN7mRjlN4E42v3JEnfz9p +iKJX6cj3sM9VNEflKEbGtA0jFH5MiOXtoq7+yqQJTzLtTeGjSsaJg7DyL+mBI3ZH +Ir9lhxwuklwqrzHTfwv20ORRXB+vqqEztNG0wqh31Wtt0z+R5s8T+h27uEv3Ttoz +iH14MJbRxEZvYHM71NJchr53fyitdM7rRf7nLYz+mNjTyq7y2ziQwqWxkkKZmrK/ +lN7ZV7dPHnYxG1Dh1tgOeen3eKAL2iiz+hOyp8SsX+HAbGPgB/xT8OCqvASIuHA4 +BYVXgUpYWN9xiHhNp4XFfwSxjWEaH8tLkBDMDUl99UD44aw2evQ2k5Oy5HcgCal0 +d1D+sbplj9Au7vxmK8tzX/IsIeC4lXd+pMTloSGosD1GfSbetfIOMvyfON3Hw8Xo +kvGdIEcklKMwdlmv/wJ43WHPk3Z6i1uwQ4D0KOHdF19R4p0gJd7+RIP9L68UGdPj +Kap49lKaoVotry9GPnkIdeGR4YLJ1X9jw4PunsfnjHRC9tTIAKWve546gMFXtZvy +JnjDogx5ZCSyEAkHnTzGG6gDucDwGmRtgHLfeKBFRDqZYpaPGUPneAL79ypbkcBp +bFMeiZuuz41ugekZHSvdgAkiIUksEFAONTjeXmUj8oIhq88lUpKKYSUhE4Q65HiK +0o/wL6NnKZR6YtMgpHC88HMrb/u9c3CP+UuDYaYpuedeoMIJtOrv4t8POWtTRpQL +jk8GwpcRW3Q3hCtmc9ZLpG5TxkwZHr0pBX5P27I5RGEkQwLIQ+QWadUXnIJqrLen +fgBd4XYF+INXQJWMG6hkYMDs1pafHB+4M20l20RoGAqZF8Zd/GXnYdZlVsQ0zBzT +7JV1zUJU/EJetj3rTTBvsqIA9/MtqdPuA95Bv37wjJG9IhrwB++mpV//Zn4dsFm/ +EN0KFKROI5812UGUyLFAEVK4dxExY4L+No3XZuaiuc7eLifwjTK/CIAN/JQd6yd3 +qtSJXzX1ulLpHJsG9SCvwR+qk4m5vRMhPk+3srweiksGlj5QKTHFwWea4vDagMng +IbyJBC9/BADQHbarQm8bNNCsfkm+K2AsJK6mXVrbFtZsnVt/xw38XNgG/weY6wwO +tK65MMAHdt7uTOPbFOhgYeqRabk1q/uVPdFg+XhpVT9TJSXqVEKLPrpp/viaWjAG +AmfTo3NT2YV/t+2cklVZM+RXZato5QA0RrhBGM/ZGK7Q6w8veQ+e33CzMhrO6vue +EbecGhOs6EjZwVWYeLr1iLH1kNZ714WuPgubl1jzU19qGjVvMST9tPsYg622zwPW 
+MWb1fGCLcol1FrYwKbb2TSsFrpK/64hZswJIhg0w+rjdyH0MOZpXtyA318FpjRIT +kN8bQZfeaqi/fyBmTMjqUKJWhB9sZj9wyvep6sOj+KMYUfRmLliI/VLcWUakAr+L +88QWpNuVGp08mQfzVrd1BwlJYXUda4ijGWpIjAzDaeeNagwGSrGgoVrVBMNdJmZd +/b2Ipuh7BshM6s4Dt8Ni4T4lVEl2dRnEEIFgnhoSspPZ8q2GrVpq++FKqq8RHKbM +py2UBRRNHGL4WOSVDHMmnOmoaVU/N9qehUZ8JrzpMOEf+lMKk/BwRdbWeRA86PX5 +AxpQ1tuwySzUVZdKTlg022tqZEMHBzeXigr8JeiE74eFt27qBvMELY0MZ0+XYR3a +W7s8Vz5bEn7B+r2JeyGYF9J9MQCUPPe8ukHFLbQPp6Zfg3SwDYbXgxN1YVK6dEzf +esaqWeyilBuDx1xDYkuOqiWC17i3NCFfo0Y7oltCciwcyYQBDZe1AKbH2OUkTeCG +UBvDsrkPdvbOQFDSJNGhij3CMApgzVv0cPQ/ElnVRVlqFDxldENpS5TRVx/vzkIN +OkVxcTqIKZ45cZZUj1Lt/uEtXEWbQPkAZjtdBMWUuj08tqnXytaTRVzUxnU2lYIr +rRheU2gjduV9jf7thI81SC9Lje2D47U342h5XO47U4Ao0UmnQXbDNNR9eRa/94Hp +ZMeOhbcI2ukkbPq2O8UjgyRBX5u086RQNa2B+kzvtDO3sZSuHYZqVlKdGgPxcQXv +sXhBFx2MG+xt5lDeuHqPtqfW8yoayshJ38YruYhRW2SKg8zDw3x6v3rWCK5ESvWf +fqVevQscwoaWzW6aTpHSjA/VsajWPmQNfm9CjRt6ncv08XXpSH1o10Dhzq4QwGZb +66dxWR09Sezs7GQunB6Qvl6rOH9SUDVurhsV3BAy4iTllbZR5QCsWKKkPhhuKMuH +TahkgpZDvvOH8gwAvOhcCcIpPRw6fkkBuuZh0PtCVLOcnWmflQMgoWpZKn+g3bTX +/TEa5pyve/4n4+RMJiVlc3jPk4DwpecQkaz/58euzaNzFdQ4eE6twx/+MuTL7Y+M +75PoFCDjpXtQXsDwNh0UNBYMhhiSbJ+JNHmgSGE7Mix6VvlQ8OqxkBATuvRD8W6x +QJopmjVf6lVUrf7Gw0ULqUmnkESN6D1LGdP44GT6iuTGtZjWozF7lp+Edf3GqMKT +DvBxCqX2/ceFtoc6+dD0+PGS1XBw4s0Bu2W2AqmRkqwD7l/CYDNMs2iRCIMbrSlg ++6/VVXmf1xJBSaeIZ0Ure8EIAK8TX7qMD52V1K9O/mG7S7P/94RChKJxRqtyumgf +soXKoLKjR+AS0WtDJKSGKhgK7zZDv9Nvrj+ex6rbFIagXn+kJgKVrNYpYdhegTjj +4MoVjbgS4Q8n1PKaFbwnAfuSBeUF91NUGzemKhnm6jk6r4yhRTlKlNs+v3gJHbYo +5670UO/2I6ea1VAVSDpqeoreo6YlT/N+enlj0Jz8jttLQWvhiTF7pd8bmqkgfCuH +5rYWKvcsdcb9zX2+odFDnTdYgdmTLfGzaNVmhmdLsjOFhstlDnynKmxnNTWrEuel +dSWir+9aRmof50opwpsDb/mVxbNxKbQBdgSgsLy4OItqwDmKQ5vjw2rT2v4sMWhM +cz6aAY3aICNhpyY2q48KolII3jS9vWfWb7GoBv4KQMOD7qMbYAJ7E2mHfEFtxSXQ +rAZ+Zn6yUjvdb2RNkJjj1qtCIMXs1bQLINLYTIyVa3mmIzosnfJB8KDIrDBMSclT +LPzb9Luv3I6zo1fRx6Ny/QOSomZFtkPgckUpjWMB8FYrUjUE7JWjhjq9qWAZX7Ts +EvDamDla6S2P3ocTU4nv8nDa/Yf+xwqBBNVRtZHsDg5SQTxsT+bpnTpG+9nOTfX+ 
+oXvXjHMMYPhkqZqBNdMbH4updAl8OAaGcnw70HBnS1lmsvpHNj4ct7v8ezGgcgoL ++gsFcQ/fmr0/RvR6vCaQTAjQpLl49e46Q99UbGVEviXvD3fDFO3UPlDbiYGAVsgB +2yBLLc4tRbSmf1zTVSdERXDSMYpp6B0GxWvxyjI8UcGvOzSb28bdYXFdKxvMR+Rd +kNyTXGcU7ZfkhMcjWJeHZj1vcopgZXvPFAAXQtnLhGrYUTXZJL5aCH+VkU3Luk6u +xQ81LyrcV4cTXrzisGk1UsS+ARVAttPJGeMp+fyKKb9vnrWEV7vX7QFABTACrZbJ +CpU4f1iGuUcM2QzYsZpINX4z9wztbKQpDcj0XtHs5GfovCDEfB7fzcRhkgapq99B +mmYffR6mcOzAj/i3eO/1aBRvorcNhLTeAKyxO2Ls7f0VvLEfsriLvfhxLFoA+Bfj +doS/Q822tIN5rnXoCUj4+THoe018V+l7nDrLunRm3zIk/0j6KCFOMCcEWGe7YTju +6XJ2qX70obQEaOMGd2zKyfs3No3EFqLB6qL4b2shX432xMSUpZdJ0QcjUP0gMVrG +35NeKx/maSjwnqbMlEv2irQvjktukjS9bJfkOI0M0FjQ6Bhl1MmbMVgqod9k4aFI +IgcYhO4L7B1QUAo6HEPhWToco3WT7cne8YJ4RrfTPjMbVL1nmAfI9IYOLHR6aKGp +QohZ7oSHJnPfHBm5/cv4PRuB8olcZkQcvoSd2yyFgzBtFx87vYogmQ9jDNFqyZx0 +hfswYoGcjM0uX0wwL8i0kOLuQ5tDa44uZk6VEFIYf/jjU9EdLL9ah0h5C4PYy55V +707GvDNj3wY8XqqVu9Q3lbMelIW7Io/hDqg8OtaFEysfSEQizhBLMl94hU00lfe+ +I7gJz3MomlgA47b7g2QtMBnt45AgmPM2ziCbcXyNVrh0Omc93kyAR2BtHkXhDW3h +A3VWfpP+e9B0l2GLltBEFvQ+SzkWioDD5Wbt99g1y1R8pT1iNuYMnM1YLyMCNb2C +iUSHB0jwGFmmz4hsVeQRoxfdoQLY3RyZ7N6X1pdHkMRMtGzYQ80RD2R3JpBNieXH +Ak+Y4cb8LDgQGd1YOxPvuwHCUJg7+7aoNuF75M3J+Uhonj2TMVpkWssYklI6u/ap +ApaFVs+c7ck56H8S/ohGhNYsoBxoer4gJz980M5qyAYEC9xQg3RjjbBdlFBbrKY5 +IrrVQiKd8q60wo9cPj+NCLQY1O89UEjqUDo1xKyU7MniGzC0TtByWaD1byydYFws +vhD76bIYihX0HRtNA7W0OeqzEanv7TOMiwlJgo6UKsj9QsvsY6TfO5nIRSbw8sFF +fDqimCNuv9snpA9oJ8CbpUmScrpi0lTn8qG27BL1kMc5/z4/1AU+wESQ4s21nRuq +k0tC6A0+xfViPt5bh//jfn0y/TwHZHWwPbtjDmOE0z5JiFoF7/w4eNI8IL96p2IR +fjQldRPZ/VjQKZSpJa5NnNCfdPmc3y4sv904RPld3m72sTQ4Yql+XSf7oNCSU+Jj +iZif0xhZQ1JQq3qfM8Y7Vm++1Jl1wpqSUpehyN4NuBzq4pjtfmLOqyQPbw4GgyPc +J60EBy/JIuV6qtE23BQY6zEf6V3ZKlw4ZOuoYnWaHNMq9mXD/aOEYX2M49vpYiof +oT1aFA7v9XuXJy4nz4WW+FEx/JwbI7VT1o52SQeT5ndTOzC7w/IJeUmhkZ7/Nk0p +fXL+LLpPktIRYtfwrCJLTstiIsP45Q9nwJT+rQZ5ToBEP1zKgUgCmhsOOuxXgTdv +XWIqPZq3VTrSTSM4LD0YkJL+oDDwP4lbF/qmMfDsgvoi+p0WFQ7OlsQkvB/UeLmC +CgPBOQmZSQ290SXxEjYCQ7B6PKv6/ItureWob44JgEuUEpIb0NmQYqNTOcl8xKJ1 
+xdMsDjYOOOCmlCBuWxVKK5d9S0vYmdPNnrd3W8UANO2uconxTRTxgxDNlGsmlCP4 +qdOJ+EwbazGO53ntEzgVzRT18dsPHlIWt5CNbLI2UUUM0Msycdlvtik0bZOQS0w/ +LkyF2+Dth2PC+h4/zcpfcl27nPtaHqlkG/WMlPC+BOM5yxo17cQn4ZvMt2kRrzGu +igcQCImwly5LjwDNhmO9kOvbq+mAJMTuWL+RS5hZx6IEI6iOAo+kirQJ/WPwEtmc +wCQnjBAwz+HEskof//eHnFpLmPecFcBwJ3sM93NRkyrRnebaItkwzocwh2s2ayQc +Z2Y+/wgu7t8RbvTekopRUNd0JCweA8QqqURzpbwkArcuX+p2Aw4BB6/LrzmYQPiF +kdznNuwuFUArvB43XVrdb9eCPNRZBUjbNqV9uRBqNvgDeNKgX4JHVgyI7SVb052a +KsrdXBoj8HHb1+XF5Nrw3TndbIFnm9UVDCfnqpRq7pbtHp/i2PH38WekImPSnbk9 +9Tt4g5/4dbpsx7Nv3TuxC0+xcKmocD9arveYWh+Y9MNjzMgMxYumGI1+ft3DLkhl +OEDwD4puesOD6yOr6trtUzi08wSmzTFoN8Q9HpkYc1ToyPkPWhN1OaxhYAKLWKvp +KP/E3Q59MMDLcN9e79czudpsjrH4qvv1rOStdp4lz4807xvXmL+2z3GYIezTIl6o +zD1U7b4+7ZHd+u53G9OLSPqekdWq5ccPuasGbbzGX60tLgmd6HBHK4p+dAOUCR8n +nNCEvFpG4Irg4wvw8TwBluBCnEY325rUYVffLWyFBwOK31LFXaqq76iOz4iYf38f +RheRmg1qdGjhfiAtHFirpVBslSufXept2BRHwgvsJssheh2xk5+sL++4cf0MO4Th +rLOgMCzybmLZOnRE/9740c+TAgG8irFsKrUfdWQAEy7S0WzDkT+CjKaQRfx8gi1z +ypB8Oaj6GjyHFgPBC7uL9QCo5jZ4/pLO9ANmuU1pYyfufzvcxSLX8431ndDFRVUW +iM8yeQEn43LoeA28UvAA2q08KAISzzNq0/EBayFSJydt4eiE/aWX1Ij8qigmbXZn +gTIaiL+p46/NbQkNS/EBL/V5xaFCMFM0qu/2TAiJgepoxhGz30GSU+ZSIE3XN41d +CLEAyqQEEA8JCMbwLqA6bACbeGoZVVuHMhg8HyqQSne0NsDrVbP4wVm/CQRt5u0M +4VXuiQK4BI7FttXr8cNAGcFxVTrcjFPCw/SdjVmkY9fZS0hb7ZAc/rqszOhfYw+Z +ANsKaqzFnArE3bzL4l8LRWmy5xOaFxLVuVEOLk5lsi6Hq2MafkSFqGMWAT3AOlYZ +B4Qgg8xxBGM1XUVGgakEK8vLRvtiOn9/mD4ToONYJ464NpnFDKvdvzAZOhHQY+KB +509U6ifwn1AxN648HWhKz2xibG4HSXbAWGDBKw9Uzo0yQ447jQy0Bd5D/ivmYV7J +Yc6qvzSEI8HCr4DYQvSR59HMITlj0RMuFpeAxe3Ngq91paFAOHhZMAsiZ9zigfdZ +6hoMqOujYGmzm7TbtUorQUok7quUFPhPy3A8+O29lakJ63nNqTj2oVMxh9E2i2ue +oMC4QdVuirxWJJDcHofyimlXqtK6TruqeuUew6XNjX0F7o0HGJ6fXxLSh7OfiTH8 +NvvFggzHxo076UVJNxh/fZH7X9gGmyqeeGOIBSoy+30OFW0CjpuN0R26JuSouht/ +nEO+AopDP5SIImFpfJsXGH3qf1Gk8EMWLcQMT81IXcbZeAG/SWF7HX6KCbluLYqS +J+GhkUVVJCiKWG5oJAaEUitmqCjS6y6CoZrctM0tLIjVwGfa9Tn+ohq45xbZ9tMu +pPdubPp+dqMmioqBU2FRzwiRPOCVSo8H7hYVQk0Eg/anBZlI9Qu07i63REK7WICx 
+q1JXUpbeEsa/tIB8tcmLydqTH0iUFHwsQFeC5rgwnnzcOlPvGTMD+BxZuL1ghsT2 +8rxcrwFy0/N77jAkv8iwqrzUj4AmGMl7kX3DWkn3VhueVtlNr43FiALwu1hWE6Q6 +w/97JArGodemFPyH13MyY5L2rIB7rBR/2CSyXZkBmOf/hxFAS9/OPTadOk5TjcCs +uliQgWEfqy3RC7HtIoNkRVDZ6neO2D/Zu8ZltAy8m5Iv3ZQmsFbzosuFQ4z4wpLn +gGy9L42pnaFrZTACTk3Yr9MU3eTHaLDmMmuyCdEFXmUm8ReJm3sIAAH5dqUgdSMw +Mr+QtdZsexWm37jKSkNVZ7LvFuBaXLUfDt1x3SPOnLxAi356Wx5rJpZ63WoaNgXJ +vzXoQ2mxWUduAzbbh3t4/4n3bsiA9q/RDuMsHuZgwvzgGGSuIHESUl5fybcKp8eb +WoCSk4Fnvw/OhtOpnTMejHD7Z+g72w1u1WzZkKLYKARAje2x5kmm+v+hw86JxvbJ +lONga18B79yGyNiaosA+62vukWI/eDc0/QYAV1jTONon7IpUpZIUTJqkccXYPR0w +jdl9QNBMLoY= diff --git a/scripts/mount-shr.sh.encrypt b/scripts/mount-shr.sh.encrypt new file mode 100644 index 000000000..bf6f0a02c --- /dev/null +++ b/scripts/mount-shr.sh.encrypt @@ -0,0 +1,71 @@ +U2FsdGVkX1/HExlufrZrqJxtCMZM2UWp8ls9HQt6jqnVlh1yFSm7EoWl1wJJ+hN/ +ejTev2LerCr27Jz/SLcpCscFle3SqKNcR/eioUtlqVMK9rHpTgnQe70rPw6M8x1M +w3ulxtMjNgV4a7LEspcqf6ZByP0sxjSxE08D69XZNMpnzfZukjbjjB5nBst+pxNK +oH5AYeQl65QcMQHZM5oG7DGvZtz331asBqboV9WnRTR2B0ExGKPqyGDm/P1WvRo5 +nAYsCM05dQYs7NiFutfBD6PjCKkAqTakBQbl3UGxdLAfEnoukZwb7fG+2+VsY47I +1eJJsj+TmllNYCtWxujPVURbuEeexOVWUYUIGcwNwLoMF3YIwZfm3TX5xeMxfLUH +lsR+yC8KI305Z3jN68eZivnmjyZO+0nMLUg9/v1wSC4lsNRO9vM/3zUUqNGNuMKM +mc2Oulnfm5q63FH+bZfv7/wzocRzIceiEffr7/VGSZJKFG+TeXM4gZQHNRA8axzq +EIFdsKVjoBrn3tqZGdimsTyeD4IJZMllzIbLXphcUd8h2+xRPA83weETPugFIwPM +cKpRuowZBCbo2UN3MynHJzwCxeZ+Hh5G6LAG4HCEgLwnPt5/HEomjGEQc99HuHIM +Oi2Y6LsWX5TEqyS2Q53LcoiFsdXQAoYp5CJU6boa8NMHz4bkUUMxSTbPePdJk649 +F5YE3wCuldZA5SdO5jpAeh3dsWV9Qn455IGYl9SZKuL2D9XelR5L8PZLLsbVY2PB +P/aPJUWh/H60ntjnzE/l/jyRraiegHqGXTIUXjGopZqjAtng4hC9awswALm2nd1x +TPHHf6DH9pnEQ0Jn9GREnolTinDIYfgKPXrymoMV0fIQxlNg49x3lxnSffmpdxMX +RnohAWzcP0OGjdAkNi13keiTo2akbowTqjgI/+ziC+za4ZUwdxtf2MO9Xg9vowFl +3KeU78sg9ABxihL/W+19aQiIiMeFV1Rp3xSmYB0AfwJlY/gJnrP47PAo/+tBKTmu +gHR5F3JmBd7teWoSigup7dz4Wwo6TbtWuJXEqux6WRxo0wOHe8eRjCt30BjIlLhh +CzdpKmxDgQkTHy/oDxC+pqNgh2QjOXvSqy58+3ywXEZDPIJL1dTNIFzNQuhWbrO/ 
+inVdUZ33AJsbUgfaMplJ8w/ti3o8uqVM2kezSJcDk7OkTirSw3J41N9VEcoKrkov +kzjDbC9BdnKsJ4wp3zZYzOu8DAmw1N4Io/C6tT23zUOyRxqf/zF3faVgjXMcbWLS +M4ax/JhSvOsl5mCNLC4SHfJwZHxwcMEgIVdpJhM6UgaA2ITVUz8yjXyCyZVDqS73 +HtPAxeywTJes++jQN+6rmjZ9xcmXXyJxnxmHoRSuWtTgtm0A6WonFGFMUyrWlO+/ +XZDGhMivylUlkXE6m/tJnMHvG1FMzi2q1iy8VCf9DihGWhw4EfNb6K8COwyKTnE9 +xyuA4xy+i9IopAr9YYyX8btFjiZGmfvctpSoNVxRXUP5lhtTbztIRJTuALeILJlY +cGgyE3Uu0DhwZ4Ra/ADuIFCZVLJ9tdeXauwvbnjXJHma5BjAiwMJMVpAfhX8V8ov +hgF7jMo5sUIxklCjRnug29MEQ5tPJ1v4LodnbKCdcN4aQgGptpGTa3u+yT8d2NUc +Xr8O4KGoGEqWQAx8jlnwQ23HOKgfRC/LXxdMfQARerydUe4F36aM7d0mSRpyiyIB +UJauYnFBrxvlxTNAj0ZA6LXcoUp0wyvcVLCL5AG61X499UvYXhHC6dDOoJfcDpww +rmWG8/zpq0O9De7lh/4a/NwjS31kHMSaUT807ajd//t9WkCSe/qblWAfnHCMzlhz +nhEasSo2rwEzHWvrTinBhgeun15nBrMsuekoJsGGL5HDV3b3xKVgMQAfMPBRqqjo +Hv/o1UDu52HVqvbnEphvmiObhlzU7Xr2yV4BdXhBrG1TsYejquIbMsOnbsRCrzPq +evuvw3DmFP7kHHcogFR00kqSh2rdM15MYm9V03EydlmijmxlaYv9D/xXOocG+5Qm +xW4qkgb0Ar9kZCwHyK+c8xxbSoe3jlgnXnEev9cKfEzux2clNTp52yVf4YFy3SKf +jQ2D8HK8YasctfSXOQZXfjLU6Kp0TBJgjVECwyrv5IGjxfhQDL4vH1lzIQWthD02 +ocohE6H0mVWToMinuCvppiHnigxp/rsmsJ9x8yGxACOLHmbsI0JpCgcuZUKb3jG7 +j4j56fXxNv3h+rPQ+AqwIdyq6UfhG6pej/PHAAuA7a4oB2eb6r9jCiqGxpxOpZrr +0o96OgVT8QGeqRfRCr7qRr1B5mM1t/GCD6ApNvi3AjO1k6L91gi+KpZy+ZlhOgIJ +eJ3bACGWkrwJjS5bcGXehuzLfzLq6hkOBzUYTcWDsXT7mfglCGqPf6dylF8MbpUf +jUsm0A5fHaNmN7yTJNgWgTZYQPJolfgkj8UmgM72zsrHKaVsYItgd5kcaVAYl51e +OPkRpJg4TpylvXmdg+Bd66gSO2WeeiQ5Oqy3fb8JAl+R0m+rJAR+t37zOEgIdHf2 +IKCE0EZWm/n978dvqD/UjHHUfgt0bT77lm7jmVLl8VSDXLcMqY2gYWcfD2A/EKt/ +LyYoiufX/XjoRBzfIYeBx+eSVbjsDQj2hkwLglFPrIVLe9TOwXkbQMMjCoalUSpa ++KQfg3AgIsKaa0ri082degNr8pT/k21GIL7xXNuaSnFQd6dAR2i+wUWtAb481e7q +OHx10F7DNgl9V6CJ6h3Ttacg8bderWDDF3Uvm7vI2syD9/tcXjuK1jBh6BiGa0hJ +lKdSAp1I53CWRzaR7wZz/ZVxzg9ynfLj0TjD5gkcnGPWa4YW3TcgS0yV9JnDo64r +cj1oJKdhHVTtFGw07VKkhtj+G83NVsBZ2EDRxYvDr9xXHLeX6kk+B5Sy8WnjGpup +4OFl8sZ7bInSBLaqVRiX92+vfU+f/yNPT7l1z2E/mxYOHZYOTDgyxwo0FkD1a2me ++9f3TKuCtjd0yJ8Wg/Jj4PRzEeutIjsOSQbfRI8VrncFn17qDjafcvrWiwrrRk8g 
+15HI5jHATDyPDDHFLqKLwAKhL6Uo7SOyP2bSiVYCSxPMCL2R2lzc72dfktTKIkI6 +zVwmVmKpmvPGuC+zxqLsc9ypAWzS3VNpxhprtTigkYeVi5p4/mj7ablylAp2cb2q +jXknMLumo7zmPuirB4yJykUimF8oa5QLCBfKgMxkRR2ID1BFD1GGp6n9hiO+SpIR +stsVt504PECmXFjQs9sXuejHuCPz+wJCkzHCxoiUaXmcuLpmFXTiKcebaBlSyGUn +2l7cVVS6aUX6qd5Tvgp+fP72JS1X5OS1OAimqcP/+OTJcE1wlUDpRrUd+oHwU7HY +oWT6Xg9u3Q1wCPd0v3ex78Z8RahhYLUYO2S1m7w6IQ1mq2I82CsTBGGO+SiMVDdz +61zl9atrfNTH47WNP9/8Wg9FzU8OjRkAkKsv8elnVdmBIUsr2FxFLFM3LuuphGFj +MTytcyg6Ff3P/vpObB4vSl+yFI8CPnPI1EKqSakUOOkASnSqHdXwGRrL82SECX0B +uddS532hNR2VRprkg4K4IAcmzA24NP+AjfOnk81sVEGW3+2Qd908l6XP4ystJ5Rs +OEwHo9/kdGlYVLiNvnkScjlDolXuTycCQFp4jHgXTphuGyi9GOhAlCN1Wvc5drbM +QnBSMXpfOyhGLuvv2DP4JabpwYi+6Ub24kS+H0tUrDW/n1EOSoGTW0keJ6fEcTwS +vHtsmUpymRchj9b5DT+37/BMzny/7zhqP+U44A9AGv0HdKxo05u5pkI/QQXSDTDg +S5bjOf2Crl+ITHml+SYDSqGQr9S/ii53mn3/9IjRRIHZJz0PC8vGL6fxDCkX26NY +BHQ9BUeObkCLiKaFrK7ppBtuM7OGNvBNpFF9/yDb0yIsq4vXLDuXY5tHOlVjqhM6 +Gzw2UKqvlqLgZz7bCH3SR18cXdV3GLR8Zi24wZhMaIaQb3z2TdlkaZQb+5G7MSzz +zQ7/eFkAmn3kUcxpz/cB9AG6/0yLitegh8YJBQenLAZMiDtxNs5mOk8NIfAQ2CeD +E4zYQJM31i2h2ELF+pVjcFP0d7RXQZX9z+ni8ID3fkqgBOezjIUbdqipf9HpnRRZ +/7k11CVTlpAa4OZS25fTLFbOOUWF/fLGTVN8ltAmZpYis2a8f2sUz+P91KXP4X0Y +/1S9w9EeDCjTB2CCAm+vk1BmPTMppg9KWYtsOvfnj8n6Z8U9zPeRpPcrhiLpuxIp +P8M2/MX1L9qxXtG9r7BROhBv6vf+LFoXOiujoSRszDT7RoqIw7+trYsVYdV6oH1g +PZJ7hKF37udRyd8dKTiv14JIzYupt8xfleiodcXKy17nm8DHd3qajM67JBXTNGwE +vG423n1n9g8Ml3sS4XBPoDew+xSZqqcXkvWnd+FrjAbU57YsoAjpVbrI05DfQrWz +FONOkPc6DdilSd6zIF+taMRzcfNlgRDp diff --git a/scripts/profile-shr.encrypt b/scripts/profile-shr.encrypt new file mode 100644 index 000000000..8d3ee22b3 --- /dev/null +++ b/scripts/profile-shr.encrypt @@ -0,0 +1,197 @@ +U2FsdGVkX1+gpzxWmnpkysVLsD+byqA7x+5PBcBPWBXAs9WXD/cqZKOGfn6a13+i +hb28oQSKA2kzskuxLcHObk0m8xlaB50LeCULFkqdbNPV++DXAMbflYzTSJNk3oag +VbEyjpjpma/1vHQxE0ImMZFmFXkWJJhBsl+yXlE9TGJuFknYiVAA2yj546OMLceo +bsppfhoEE8QX4WnH/uWUZTRro69ew6jaEXvPiq2HoGtL3IbTbF6OKj8BCWiYz7vh 
+ymHBtAD9Mn61Y2a/vSYhmOwPL7ckoCSkZRgr92If7hs3i9poCHB2h0AeBnihdzMF +r6jG0L1W/mOTvF7ed2xeGHfs3iocFEzrISXndqo9bmbMjyqlKLSzgEvnT2Zy5OJw +CJ5ehc49bSkjXfanZZv0f38QSjiCE15HvEI0GnjKBHvjU9sP7Lz1ZmFars1ULikU +rOMxgObgS0arXdiWfvBE/Ybdtcrg3JhAy9skZmJ2GL4Qn2PqmtRmJGoIkZTW89Ld +NHNnnLLcyHiBvzC0ANOp3SGLqnJgf2K12mSsRtdoIDwJoS52ylr7ihL5LRCOiniS +ZGbCI92mjkpUFb95w57a6x3y8TlDy+HIu0J2hUuCGU2Lk9xG/xEn742vGvGzBJ2c +f5N1ttL98mfn1w8F6fzxz6V0Ddi5dsYeWwsltq60PVhYbsXxvHyxowgk1ppSo9Rk +KtqP2yU930QkO1Y9e+0ORW2mwuEHRi6/eKByY6vkw7S20n30Jh9VtC8IolXve5T6 +m74SOg1IgFspVcx5z90kq4bgVr55TjwBTI0uCcADaCSVSsU3fvOhutxoBRl3scmZ +/5k70UYPkz8TSoC7IYDF2fXbL+wst7sOHOTRaFGWKREUiF9+j6fb3zE0B0JJKLeo +pngQVqsTrcSL5+le6fG2rPIanZgJjOk63Ty1X/Q1pJuotdIr4+dl94cFBHNEaBNf +3STVzuZGGTcsDMshIHDv+t4D1jjRngqfirgnQyIt7lWP02uT37hHEpt23k5hDx6f +Aumt3sdgWWVuVIZZRIxbjiZ0USaEzF0+goxWBPbGyj+a5IENAnig8x8oNFiZ9x3d +CbRekDkP3lBrirOT+YHFypPa3vEnVsaEls1SWhjxWV3tseM0l6kf7kXfByLLfkiH +ARLdk6o/zg7ECrzYepXez2l37YkFp9bEFRX4yDD2yrE0wz0RchJQau2SL7SaDGau +IBkzYcuSYgvlPvN5Ah96HyYpyBbAY3yHvwEuNcZAF3N7xZBk1EY5ZHjPdzWHz4Je +8Y9F7lr07TzrWuivZEJifQHEEHgjsBC7YMtbqnC/hOeYkU/PpmieqfDE/PLg18pO +7OAh3C8mS2qK6IdR0kmdMz/V+nm3C6D4gZoLpQtwWCheCemVIzdsqpRbqz/nUYd3 +P7x+gy6FUYxav/J6C5ynjr3vhW5d+qbySdOGQSg24R0A546xu+kj/YSYYJPDH+s0 +0pkLpyDOBU1seWuxwzBXVqlhNB+ROZCSiGX+0rdqME3lOVbC2Qv6hg993v/0iNrc +PmCNudPVPU+8AhDFnYbL4io6ivjJsyvc1tU2H/82grN9EDHVuQR8Xfo/k/csxWbj +WhDqdiktfdjLO1p2inUwUeFk4OhpsVP9ahl+9LnhFA0i8Egn17Z7cNHFbRInZeFj +9xMQMnXYfXHx5ZxjFlRZBOdpByxDUHCBDrf67g/14TlO8RAednIrtLandH1s1DvR +ZamBL915dPLsBHQk7IM/LoSw1H0Xbt9O5MazgZzLiL5JA6rOtHvGauM2vztsjDdx +iritlLdELXgVN9+ivJQy8h+LfuFduH4ALGG8BTVx+5eKRewsX85kv2Rjuz61oaD6 +K6O5nW0UnZUOOaayjyBQE4SNsU+WLfFAVZLbRqjEE7txBCvPVDYfWE75WhDmnKGy +HsrV7be5C8uzyHf2aK5W3gNHVgzzKMwNDstGSKN91LlN2kw7Vp9IG9J77p8JK55+ +y7n7ltooIECIqzb7sfSpB3bhzjkejuJ0f8fbHT0WiTAJcDLmnlSUAhkYWVW6ioAY +opHE+3fgajlGkG0STxA/kwW38RF2jPp7jVsGo1iySy63SXgi4m6aYPChejAAYMXT +L1eDkUElDrXWiLnXNjbfIzlrkoyvx1OQGDeST0dBVSJhIPyqoZqZV+WCwFQPaFL0 
+VxfaeOdq+qcDVZhYgyal/hc0cwGswrXC880XZlxARR8IbJE8U4N0XfksFzjMFwtU +oq3hAbDt4qh9RRpt74GrIduGBEtn0YEBJUZCLEv+Rjq/dtBqDI0albsWVMt8lfSO +2cBrFLleQJsMo48tu/7aDi/fzWtKJhWY5c16SzTtrsr3/UQY1+IQ3wUui2rntGIE +aTMU8Pgkcmlk0Kt01KCdnzgDUACcDyuZ/xh7KwYzQ7R0rUMjizohoqOeUxX9tVGR +4yztgyovyl9zQfN96BMT//OGoXLkd5ZT/YJ16MLEy1miq8r0W/PPh4RmAyW5aLHO +xcsPaLIO+6P7m+BTnl5swTYbGDzTNAoSkawJRvi0kJYJOga32vIwvGgUnerfQMjK +Nq5afSmI8MrKW99SWlhAbkQlCC1OohUJWs82IULNwtlhLdGM0U2LsjZNQb0w1l1s +lOluXeU6ampj08bFE3c/3aTojVeTym4ALsm8aec7ezHDEfESIAb8fqq6RFQmjeim +JCH6wsDkkjmuX0/5RA4Ke1mWAdfULbzfMZAAV9k0N7YP5TQ94lSeTJ322qLXD5Z9 +93J2Qg6u8q5X8hBX2vnrJt1dMcwPwgRxDQBSUx5ruBrip8CPEzu29k0D36yufdoP +PA9BlSwj0kluxzBFpQ0iLb18qE48gCUCypa4jw6tdpAb9eXR+DcTR3oMEhVQoCld +uRrK05ehQjpjk/oQBNohHheB3zQhinmN026evmlotHufcFvTf7tQbe/8LJJRLWd+ +kphZfBsD4WFbfBu23bZBIZoYnmfGFcEGWjOF74OYhvzlU8It6AMDyuzkMwuk70kd +J1Oj84Y7ei2iq6wxiQlfMYSaEwqRRK/dGvpyYjZ2jqayBwhcRkFAkZ1B6AEAOMMp +moGDzGV7NJqr2j+phwygXXqMs7/ehBoxSGw21AWolNxWTJQEOws0Ld/ZZXVBsDw0 +j37v3xyzecO9UG2vWGACcedqSwEUD1IeOF0gRYuq3F0ddj7RBy88s2tqw07pbEXt +6JceZl4WFW6hk423UC1CTZ8vhqhN4565CHc3W+w32rOgzi73bzf7j2OKNKV0R7Vz +2aXrDZAwH3RmDacxwOnQ3aovpNiUPs459bCB7DieJg0rS1q6PnscJNKxUHA9Vmgy +XRwAr4ShtZy0F2TPwj0Yrku19/BnkFRJwJxEZEX0JNTgkIhOeb7Rp9VDOg+S33/9 +o35A2gTaSooNedI0UU5V9vvTaUvkRZYQR8RkBDEqoTOGWYdek/XBgpf3nDN+f2Pc +Vo4eiGisrXjNkbFB/QLWEV7iukFtDNUxhVpdJ0Q1n0mdWtyg1N2Isc8WyIlXutnG +q9eH1PbeYQ0NKcqRMBGOY+XMVaHShXMDeQFYerVhRNBZujR+cLHKvQKS+Y76Cv4N +61jyJbmf9Sih/IEdBoRQn6hAhkYWSRmyMSvKX3L0fUdAAYMoIoHY24fS24MJ84gC +NoG7vmN0gbtC8iGSFX3o6kIdyE2/gnf3iK1LqQTNLDwwS9x35PosbIGRe1moIDyI +y7FGaTOFXKCUnQ5dcOPKLnHyUDGwy4JUKfOfKxG9LZUhoYWLQr8YrXEDzMjulBjJ ++cUq262+b7o+TtBL1o3hnTaB5Nt4bzk4FGrzSpeEghWYMTuotXDjwpXyBP4dYhnH +3srRrO9tZUn6F/mO13vGTrtAvwdtWommcPPPWAwK7BQfe2Ux2vpB+OhFOiL1PTA0 +KKu/a4bLBoXP5hrFi3vCVesbPcK/IIPPwwbH7XXwreyV8mpCy+C9/s5SSN5hV+7F +u26lZGUe/oXprXufDQ4dAIY+D4ubbTiKHsjRMYZx3AVWW6fkqsKR4BR9tkko93+O +W7otfpJqpKYz+MtrIqSpkng7FyTWc4Lk1CDwLwa2GYP19xd4bxP0T0qt66Gz98C4 
+N/lnHtzZ+7yN1IMJV41P4mntNrtqvXpugA9b4r+A1woFGBiLNJDLnWI4A3VnskRT +d9IXq8e4ODWo2mg6WeinVi446WfZxy4a3w+v5AEcOrIE8BkP4ynxK1wzcEZkGUO8 +9TT9EY7llqNLoI+Fi50MokYcPOVTUZ4HLLQwUuKT1KqGFqgbYBpK+sZ5xS4NdHRF +lIphD4fGTiPTjF6ZWbbTCnWwVdr77f+tOlHPJjxFrJtWOthjkWH7lHbPIHIB1qh+ +0hbamNT2VMTocdDr37v8NeLjKGkaRq7gsAacv6OcTPagfSpOmvj7+pebu0/M0sHj +Y7/sqGEonjHXoLh5EOu21qWpTlo15gBomXhw163HHN3AmwiK4AI1WhYJdsZ1vQky +IpgvHb01qapVm7TSmnv7Ja5yC3JRo3+hoI5pI8CUefBWXkoWXD95DUXomZ9EakdU +gBoWqbTMFZR7MZTQyVOoP8yFIzqs3wv5JRgAy9hpRDIso5vj/GLkJGa95ETbTU/t +qOnFe3uGf5+jbq8+8lNffUAJX6FRCMyQTS9uaSot/P+Z7zLHBkCOeALacT9XwAuY +rCHJtajij/Y/mF7OZBlRrqhT08+1yUn+MH4oi9S0oHB+603tRy7dIk1xFZ8ohujb +soqGjZ+lx7nIPjJpe5RkZ3FlmSYCEWuWjNVzWWkkJmxuOP+NZaVjnaBrqJlwUy8M +6Q72Ixhs9CaP1S6ICC8xL8a6ky0+5QrE6eie4nX7mqxjXn6KatjVpB4TJTCPiEpe +k6nkfSfMmgWtd7YEb11nlrlnQGTtfy90ZeSChRLXE65zDrfRu0YPkg4ytvoTISJ8 +HOhEALkN83rXQaBGDGsFthT1i5bOZIZeC/Xu3jtANMjQBMqnVTp+7hBAevvCJDsq +kOd0LAsGYjgIM7jiQx8vEynu2l4ORx4s2t6PWjbc8N0TZkwtKp4aaEt1hxbau9IQ +xgtdICFeoS5og2nV1cmq5Um9eVH6yFr0QVXnRYXtYWw1nanWXcJw8ZknaoFIXo4i +kfJLxOLs4z7gekec91sY6MQbO1wCbp5qAYw2Y12GmJZX1eBkvIbvGoIEkwbdASe7 +00mvbqQT/iYMuaAeMCdLcdqTHEuITF/Rxi5/QRkvXCC31jqOtmnOGwOZ93yrngKi +y6LlZ1t/QPNhs8+wIrMGbeynL4qMTBhxMK6jPWQ4iSAKOLIv9kOUL7sKkbBXN+pp +sWfFnWIx1EkoM3CkBNK46XhHyNZ46zjNcuyJ02Utb3Ls96cnnt9onRuzwRfp+dsh +2PPEk3hcCZ1Kfxl/LBIIaJFLYXmvNK4MHjzFLrYYtu7zAMlBNXJZ0RQfVo0Qsgw1 +vnlB1z0Lqd3Gulq5iPEUlOl4Ii+/vjXrP2WhZmjOiplwmh4cFNsgrwxlWi0K3eI/ +cf8rBGoe3BzRWvpEx5cthEeNJ+YkwiprbriyvBiIiw0UhdrC/BqTXJmy2fQIdMHB +4jiayeB977k/GdnF0HWRszVD9zEHUL47Kgmm4QAzsAK0c864dW0mepf51GaZP9+i +cz8VL21TNLQ2zto52eyrNKrG6sd6XEsybcd056xf98LqoNxxUywqPY6ENgMK1VaO +2Ce83JBpZUAKjjTNz3PZr2lcL/epnzzK+UghNCQFoAbffkYjsnUng5yj5CGwg4ST +3XpXvUio/O9Z2oV8hhZY8sQ5VGiwMkRfAufXKqwY8SGWYQiurUWBsk8VNmvs3JC/ +7o1Lr/6ZEiJxdzSUV0+o/rmS0DpaT9oMLYQ/dm82gPpCsVIvHcqqQfmqIIKBrZA0 +otpXUIqfyGXBB+yU1SPKPwGq/Proif6GQq9UROgN4Y2trIkHvXAu5mZ2Rf09tOGg +DoSj/GLhligvN3eh3lewEO8qQJilc66pCSR4OcwJAOrNnkg5fMQ2/9G/6zYxwX8q 
+Wo4FJKjTN3z9iWKThnklmgbhHI/Efcg5cFBinoo2EH8gRTda9YvN7MST7dXaU7zO +uQP/eYlKIBdyS18d0mNsssBQpMFs8c0W2YDinzXTnXmXMgpGOiBiH2877I6VTo7l +Mx0SiQ02tsa/rjknBPRTKAwMrktPEoF8iF3dNnzk1Qt2OKYawY7zAECwRnnZhBlh +z8dW67jv4DQmeB8OZLesaWmAQM7/gmw5ux+W8sf0LAonhE4dYSbhLjsehEL6zHW1 +1UazjQK1BycAmxsB09kDP/G4s3v7wrlYT0Z00yzn6nl8OBdSUctjYXESZAcTYMzu +xH4vAUOg01YSLCmMNT1gE8GquzSLHMJHw1xZQdXtucEIm6whVB+35I3nTiyfyL5H +OaCt/zb1TjfZkLNhSS2XOE9C+Tr1NX9obgGH2syljTL+dFs/CkIfy7iyhDL9Tlk9 +evlyvCbvJpFlOCcpS4BKNaLICJucTnHy/eZN83ooLRdqKCZngZUtdXn0QY8qGuJm +tW+AKV95j882dZFEz0PR5lWfpj9FMp3ESmR5asb6ciMZmvBpfEIivc/n5YCZxvDp +W+CmjY8JZfFKmlO+E09YjFK3xSHvwtJx6ZY8Bfrj7LBflwHIOyrVIEPqiOeWxrVu +VTiioMKI1bwcAFg1Qt5BL4Aju1r7cCKHUZZKsoocHfZKkOXMrGmlwFLYlsNoyfD6 +mbtYkwYmQkBkVInIMHUeGsFvJXxwSremq8ZF5fopJofTGR1kUqBWm9eianTYo/vp +cSbykpkD/LYONnIAXMH5kOtA+sRI9EbiMbjgR+NnTUEZgG9fyJAbEA45t0d1lchr +s3+jT+wwRm1PnIDW6cleD2h3W21A620aXL1GbTgnBf42XR3E5GwBEpYPF8ZFjIcO +jbiMrOQ82QcfjDbuxb1j9Bw5yJ95Bc6GfVcfS1LYm6N6s6l2Rt+37uF4j39WO78T +5dcvI5eXA1kuenx25hZMYZ7SLOAWUCJIRSLBtgLZx1B+hm4K/ezmrdbcMQsXbwjP +5sfoWZ45uz5b6I8eYr7HK/M6uiTNJRhPKJzsGlboB4PyMZ//XsW70JZNCYc7By6v +Gtu+HmIbGVVY/jwG/4nOF1nXSpnJZOJHXEgc7cc4hD3y0579maQsYF3ET2m6+iGQ +Pc76eMu/GF5RnK9XGvE07Vej7tAw71sN64bkn8ylVAUhXtHb+Wf7hEh5QDTCjAl5 +pk/OEoRrHikD9IvmYtMFC6oYnpMSPlaIJGC40dlaTjpsfVwQE1NYpM1piIBViKpp +OQRzC5sQx59F+oPtsVMUNyu+mXMTigsWMzmgDWs31uhpL6W09KpdIG+rhlRDnit/ +RU1nUqvTgj1Ix1KRQYGKTQcROFE0+kMx1sFHr/4vvdHK0HyQNlEXEvN4xqAGc09F +X8PTfJGVhuS1csWJqNWEUjJ8DPhIt9IKupX9VbJSxNMSmeYWUBa8jD+P5aQpHrn8 +7AUNZCIDB0VHlKrLuoKCQXSmsZn7VExcZenko3EsvdmEmB14jBEtiGgDaDDAqFR0 +RMh7kqaGwBu70sGqo+0T2wi8DVtb0CXHqj3ja6Ifu9ycrLVYF3L1z/i5wqVYV+Wm +mztDBaPDFHtaeYFsXxyV4FO0ygiwdpY8nKhrcuE6jrGObgtX0+O0ccKCpINRr3Qj +1Q6lGymxQjJVgFourNcopk7Dveq3RVzv6Gmeyz+pza5fy0OWDeX9I3Azi51omod5 +/0qj7PdQtwAwksc3j+bFfNBDtU+YBmGXK/ufAZRwgBLqJKRiHpqk7mtJ/wbls90J +5NTsyTsgIuvSl6CdgGS5+Bxpq0UY2H8gpHSOLeO+tnd+bDLEUlRT7uUhNO8vm+l+ +Db13aStdAa4hFrjA5gmWb5PWyJSLRncZRPs+NAXGSUZ3Iu8GS+fGiwDGtZdui98T 
++GAEqd0kPWkqCwjsdlIIXS8m8C7iNB/WGPcx1zrhQdQAEucMM+ZREn6YPwxcdqwD +DIiOJwnv3+iW7DOa5xPVwWNLIg/XAMiG/m6+psStSjuDRWHp78qU/LYA4JuLjnZx +XUGlb4/YCGQ1FeDNtkAlu0Ltlw2qj7NH2j4DBMjcTJkSrf9fyyF68nUnjIGc1HDc +2gzzAru05YD7/3lUrClaNb9VMK16sIpQHYtvRnFvJimZKBIUuo8M5lxmM7S6jnwr +ueIKKxOLlSA7cHx8aM/Ct+6sRu3K+5e6J/5TGDpab+bzPfBERGb1IohRsTZDgO6S +uflOEfq72+y0pxjAUAVyAQVw5zZEDm83OhFYoCnwr3qi3kDPL4+dqUW3/zaSXfGy ++KZAvteWEhsoysiWze6hn8WLkuH5lIfeUm2nY1rJASTb6Ob/tFBPXOFo0IxI8gMu +GRH6D6d3Ff4uWky64LX6FSXS1q2aaG9xRvn6OEUkKUQkeQNuuLcfcwGj7CJurSui +BhKaxFt+xv1TJK43G28mPAouFak4DryeTnrzJIEUqBYamNbzPcv1jpi9qJgkHaI6 +XfDbtSxGZR8EXen1cf1eW4gxFs63uDT4VZ/UbNM7TuqbEz5St4ti6ztY/vbiTE0J +l6Js6IGeeKQXVDin1EkVP5snxkxUO6OfAOT+K2r5aEUPVrY+GgCftkghO6fnh18Z +sb9/IwD398fbnoffswB9inAggImy9DUFEcNwZrpre1ehbH8JNQkeuIN3ssknMu+k +h40PcURHBIyIq/pbznaZtWgT/rPTa77+Y0C9nIqyBjZHuuZ7p9XScl6RvvpW2rRS +MQATFuI2uPyF86A+Mir0qf+ukxMj5229vZBi7BB9MHyHjB+9C2GwulyD8tCiKY4X +NLTPjJgI2MTc0rnbyfzT4sJNVe3LZj7mTgz5MEFW5pIo1h4Vh86HLT7rS6kpvcuX +EpK/Nygsq3KaouzizP1xtPl7hIizFZnyaSuR8Z6hS1GRhytlSJnO7NdA+tHmjpxl +mvyjyQruQZatqMl8dov4KhcBbmmdA2twr3nqCo4/J7evu4hou46USYZyrlwdECY7 +nIrLC+23FnVrKCLLPwRccRAdyLuG2qNonGCu+LtD+OvhXj9HfmC4h918yzxnWYwn +UFWTZ0DgeNb+vL1xHyzvB/ii5qpqKfFeeHSMSIr9+y0JS1s4DOCpKdxsYoukVlt/ +Qz5aZX1+zb6vP6nLX9LOLn3ePtOz6So2FcWumraqyg9K55dxwONdjqwvTvhCSlbE +BH5XjmthhrxJP8b6XAzDUaQGsBovVIP+fBLhjw16pt9zYd6AzpJV64cDW2rniG36 +AveUDPvH2DVFkDwXhOEDC7tZLm9mNfkCRhYrZNSYSpXh7MvpTsR/N672xlSzMqaT +XbHG6Rg8BF1s3YZo+vLevfsaQSSoJyy1m8fSGvWxA850czks85/h9jgpOlo7HcbL +PtYyX+BwAHTQ/seXT7edUDBES+NvzoBgkV34Us1MO2p6AmBqgujLl6cB0weyuE9y +xeyRjLiSWoZ+1dSbwbN5fu6tMrtiHXApC3RMP3EnoNq2dL2OOvF4LaCPb4DEloKn +hIZAWA6DFDHzI9vuHSdTmJMz7mBdaWS8Vp8taPgxXF3+c03/jMnYvIRs2DEyysvk +vzmuj2oWPCc9lww048APplUUPyWs7llhdQwkJQ9N0PKUTRhFqT/v1+efMOzYqBOY +XsM7pqLietEoGDN0TT9U09BZYQp8HDBGiddtTwUPXISTwTpWRYneZnql2mEGKyYX +0ZbyCHlNxvA1tkzdi3KQDlVffHOQb3KolhYvUQxx5H+erTlPntPQ/EWljxpxpubG +wAhwzzeZcKbkbOfhmM3dubjvDsmM3Uwna2uwU+kZTV4Gz10dOdPQYvcFVyTVrHAw 
+W09ULjNUgeGxOFBEYejqsez9l3dwFDL5L/FJ0dshU3QKV3qjDkyH679Gj2wZ5U+h +MhtgUF7P8qSkJhExNjSAEk2TRLZDr3aTp08gBAbcS9oPHnxxk9bFqs99HHXq3+fM +DS/xTf892IBsLpU/2n76hm2YtjQHh+ZKpap4hQFFLqTFxboKiEgyQVSy+RB8aPwc +nUmbV01EKE+vOnjx7hRWk5SuSvRVq6UG2bRzqzHuj8Uo8diuWaBxuUEE9ez2GBUR +2NcgD+hEn3VeZPRmEO3IiLGnHVhqyAXm5EEAe76PuzMW0Z+h0syJv4oVnMFdTNTK +0H1Ch9pYxuWUNF89/lbpVcVyh+w2damDLkWdUhm22Q37oes4pVYp4A7recogEiqc +mDLjOW0LlbSPZWPmi0GlKpRWde5qP7uzQY85ZenqemgFK2eVLBfNjh9vT4paPC/+ +t2QgZHqlqEHoGmiELPJdOkJpT7Prsz1IkDFqBZ4dUdnWZ+xKSDX+/s1ZZDUeXsC0 +kNI22ZAVJQApiBbPM9cVtk644nKuz3GT9uqx/zoIA0BK9aKu2LdV/ke348xsAYM0 +DPenNqS5xCrMbi6NJOyXIc7v/Ch3J/ZAyo8Qq49m2GLlTW0pX5e3ZeHmGzBNgMpD +T6fldc19wGEOYQws8GU2dw9g6iUC7lwfv254LGeaLuQ6SrTpoXNgICY/ZaJCeZ1E +TKjR4oCzPogIqH1LgJ9RyGxKQ6+jNUOGWf2+JcDjCpGF7Ndr5aA6NiBNuwqsDLyc +tER6AZuFI7HkbxCZtKpiGtv8LL5eNSelH1sUol9jBSSqTtiM0rcdz/ZseIdTrNer +ii0KwQ7ZODbfeEuXk0l+3FiCv/ijYFy94XA2q3I54TOEdAI4au47/koJn8Dm6l7z +7VwxhIEXBrRpnHzwB/7m6lh5l/WctO0sjhKCDZa50ro6jvtmn+MUvcv57nmE9oWw +snlKO60Jgqm4kqnxQ07rUB088ig0sywdliHy2n4p2Dbpq7QaBDA0n4HPTIr5Fsxh +Om3BDMGFnrj9yRbHV/UP2cTUOoqrxhwBNZigpkl757LGUF16WIj4FDYzIOPElupr +I3ae3uu+bv2Qks4JSvpGyiBluXxfnStmGJZVjScL0K5bvANqVzcGTZX11wzY1ZPE +3pZgWWsR19lmKGZmx5cDv3UfovSwVp4Lo68lmJFCxttqaU2v3qCiiyxjvjzBIH7M +zbF1foDmll99LnZQa46KxFlAvv/hzaZeqhsLIETUGxoHRWMEG3l/qHlBCSrNJq3Y +KXRhYlf6vQYIrx1//FPeCuGce4nyHnmt+lpRMQWut9EAWDjPRJDbYb5u6iKdCdXk +cgYUtHMN/ltJ+tFidiok0FTTvFuy3dZxDySMIEOhcQdFqkuzf9nTn/pEIXg5OjmA +a8Y603QM2EiIjoXH6O2bXJXtTu7DUVeezpX4wfXz7l7j3qSLFvbI6WNfIwg6B9p0 +LJlawxhlEZbbqZJMO+8OmwP7+sHFGIv47oBEUNlHszLFN4z2r/PBeoprGIo8cpsf +pYDf38JeYo0hh2uoWOR2+19KuFNLRkOgD2nMlQUJKsHvHm4UH4IXFjZccHZVJZ/K +tKdhAeJ9CFLTfAIIn4jzAvrkMdV6tbFk+e1hY4MKribfTp/uoiAuukb9C75xL5uB +qmeCOLge1s7sEGWlgx/eGtiXRsja7cyehyFyBajrtUN8crRjN4D8FMH0meQR7Xgw +rKBEWNlsc/KJBBnJHKnzLeNDkTW/XwKDX7RWkLmTbgwJtVaFZZGxVYif4LZzLzxf +fsJTza8ZugP07TFnivnsjUfwfpc5Cnv4atcs/uoc/3oW2Z/icAUHq6tfY1I/DwGW +QgWrqZq5WD79fSjjgSQsLyEgDIo12o8jlISxlcAl4x2v34spN7ZNhyLiHFZT+iNX 
+GzkdXRmCQTW5wUFbdaZW5ZdCfGqsJSkp6OhHd9Sk5gXfBNk++nK5VhQqCeijFmi6 +P8HeZO2F+mm1ZBS5yFXLfE3a0Tgut+fey5PIbH9Hja3Fi5sdqephsDtNn5GOYIqI +0MAnpTxST1X+tMuS63sRZ7WqsUvaCSNL2XKLDD7yMNZ3O2UZ+AJg1c4gk8X4XcaC +12w0eQbn7EUJkasi8DcPnhmp65+rLUOVtm/vhzVTfkS/oZHJIptfECPEdVy+0VYL +C0getEsr+wKvKFLTZzBuCXOCZvUSSIqTnWKzFCaB9vHTJr7Q4d1HOQQY4xMHEbxI +h2ibA+v3Y63Rpp93w/uYtJAfecbD1EgbFfvjDySVorJqag7AFe7/gYp+xWL7glNo +GI6YR229uticisXy8JmsKlXWzvegyllYOjVJ6JkIyciOKfyr0TNO/X/Y4qa6Hb8B +SbfndxuvvNbpwQ5Lgo2K5bPFTQwyIglIe8CFoFVzzedHTI6RlaZJsy7c/0Nzk3B0 +65/8k4eThGELoeLS/mIwWBT9EY1Je6grVx5xinJFOKhBQbPlWw/aCpyQCzhR2FXB +Dr/5tlM7jrUGjoHrulyUTNVvmPqOf75dsMQwWcX3oaWkounXhZ3PRAd0l2DMNoAY ++7mw7IqYT0ccaoz7+e9YiGDRpNfhfyERHm3JuvDkZedeLtW3HFybTX+1ll1AGjy5 +oyfr0ANI3yQ8KxcvBjG96zB7x9IP3iGN+KpP2kFaEQztaXbynZjtYoN7R47Hb5Ee +525wPDIalX+d+IK3t80fi7lDPBnYAFslt1GhsCsEody0vCkxvuvC6Q== From 2b14281e1bb6db9fe03992af8a89c96a222a70bd Mon Sep 17 00:00:00 2001 From: Demetri Date: Mon, 5 Dec 2022 18:32:51 -0800 Subject: [PATCH 010/413] Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos Signed-off-by: ddimatos --- Makefile | 84 ++++---- make.env.encrypt | 542 ++++++++++++++++++++++++----------------------- 2 files changed, 315 insertions(+), 311 deletions(-) diff --git a/Makefile b/Makefile index da868e7b9..f0f6cd9d5 100644 --- a/Makefile +++ b/Makefile @@ -58,43 +58,24 @@ divider="====================================================================" encrypt: @# -------------------------------------------------------------------------- @# Check to see if there is an unencrypted file(s) to encrypt, you would not - @# want to delete the encrypted version if the original unecrypted is not - @# present as there would be no recovery process then. + @# want to delete the encrypted version if the unecrypted is not present as + @# there would be no recovery process. Then check to see if there an + @# encrypted version of the file, if so delete it. 
@# -------------------------------------------------------------------------- - @if test ! -e make.env; then \ - echo "File 'make.env' could not be found in $(CURR_DIR)"; \ - exit 1; \ - fi - - @if test ! -e scripts/mount-shr.sh; then \ - echo "File 'mount-shr.sh' could not be found in $(CURR_DIR)/scripts. "; \ - exit 1; \ - fi - - @if test ! -e scripts/profile-shr; then \ - echo "File 'profile-shr' could not found in $(CURR_DIR)/scripts. "; \ - exit 1; \ - fi - - @# -------------------------------------------------------------------------- - @# Check to see if there an encrypted version of the file, if so delete it - @# so it can be encrypted. - @# -------------------------------------------------------------------------- - - @if test -e make.env.encrypt; then \ + @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ rm -rf make.env.encrypt; \ fi - @if test -e scripts/mount-shr.sh.encrypt; then \ + @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ echo "Remvoing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/mount-shr.sh.encrypt; \ fi - @if test -e scripts/profile-shr.encrypt; then \ + @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ echo "Remvoing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/profile-shr.encrypt; \ - fi + fi @# -------------------------------------------------------------------------- @# Encrypt the files since we have verified the uncrypted versions exist @@ -102,30 +83,45 @@ encrypt: @# -------------------------------------------------------------------------- ifdef password - @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh - @echo "${password}" | openssl bf -a -in 
scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr + ifneq ("$(wildcard scripts/mount-shr.sh)","") + @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + endif + + ifneq ("$(wildcard scripts/profile-shr)","") + @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr + endif + + ifneq ("$(wildcard make.env)","") + @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + endif - @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env else - @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh + ifneq ("$(wildcard scripts/mount-shr.sh)","") + @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + endif - @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr + ifneq ("$(wildcard scripts/profile-shr)","") + @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr + endif - @openssl bf -a -in make.env -out make.env.encrypt - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f 
make.env + ifneq ("$(wildcard make.env)","") + @openssl bf -a -in make.env -out make.env.encrypt + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + endif endif + ## Decrypt all scripts used with this Makefile using the user specified password ## Files include: ["mount-shr.sh", "profile-shr", "make.env"] ## If no password is provided, you will be prompted to enter a password for each diff --git a/make.env.encrypt b/make.env.encrypt index ad7ae2396..84560ca7f 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,267 +1,275 @@ -U2FsdGVkX18+W4d9i/Pv7SPHW1HcDa1oC4SgUC3uWVpK7kaXkcdk8lV8odYGtLxf -QoKF/usOm0CkUndeJRTcHdPL4wXDkOp6edyjXo7+7DjopKio+g6YH6q/HwTeOIqR -EUajgflZHSDQ3BwBFNnamxthpBUKmLuOqSd/OMD6FeWsKMW7Tm5lJNBH0sfYA7Nr -sNcWUHFgGJeurhKK3KIHqeMo+Yi2NbYXJ/s3FQGdgUyE+C/+DiRiEUsF3Ej8Vz9N -2fQOy9Z7LgPx6pVVzBzxJxPWAnz6W20rKIDD7Cu75Vh5zJ+TbTNNIFbvFg6caCUS -1/ueAvDy0NSJJle3SyXgO5ApV+JNbwRPTkE2zcoFP5EyLG59t/Tks7U/OxjN8vwV -Tl8FJcfc5tLAQnpm0hxVDph2++o0evh81vXQoUH3atNzPaldrCIrt7x4gzUa579j -cfTMiivakat5dgT9ZE30ogjk0Xvk/phox2fkp3t3GFPrk/H1yd9uC9mUjHww+q+C -Qm1H/5ovWpNNp/qX5npZQOJmb543XUqf6Y6gLrfm4rdL+2GYdrR0fGzaapSx+LZd -owZx19Vnj4NRfk8nHRF8NupffBWBQeDhZcXoFESL5D/pvSoChLUwISv/fw9o6W3w -L4LASrZAJ5ltcG0wdDy+D7si1lDOXREMv3j7tymG2Xtuc7MnVjidFgy4bHz4xLAH -zWBDFXVMIq+yRhL5bYAm8ffVvqWBXQz3Qq3LhKVf4x3ET/bl3jNjYVLMdGBskakI -tIDliOgssk1lAKiSzAF3J6tFN30yEHMfeTaHFl+LUGIfcNMtiVspTy0eRsgRaYv/ -Yr7kktcjhDZlv1Dgcd2Bk7VZwC3wV9DL6uMVwLTNwixsmS+FdVVmoo39j+NMMGDe -wD2PcyQdqHmmnGzhT4sIiezFdatMtWtntpguPjst1imr+58Ujd3D2w+LzE5ajx61 -/9+8ewlb1c3ScuzWS/9bFztK7jjL7ar6aI2ce+eTn70OnurJP3Dp9VBHQlz7RsaH -cqK2dyFcfI+Z3UOoTe504i33Tw6jBvdLl+o2VGPJa3CXhvDjPfCo3kdDMl6g+CLh -Bi1FKuAPkK0y5AZdC6ZbVIpv9EpPovkFrkxngNVmDbrAELMtNVIBxLIm89SghSPT -w+oBNBNvlzdkIARsa0JoYjst3YGDBo6NkLoZOcrUO6Ct8OUtXrXC14FBLpct0yUa -5FDX/iPrhKqsl4HSJ+/FuzMfX/TKGaDLcfL5+x1o5r1liFbM/VruPu+4AsFfiSv+ -Rfrb7YCTgyxS7/sTl4iVvVC0jmpA37mgO+g2hEdCv1n49aNQ93jp/rf26A/fGAdH 
-P+eGXOOJJyRNsr1knSkmpdOpIlo5L0np7AUpAjI7pC3iSDGBu8JXoK3ciuIY13NV -tvitHB7rsX61GHr23ph360Pqkb2PGkkBILUNF8ZOFfYoLskUXAQQitZ7MkGyDXJM -e2tBJ84BBl+xk9SXzOf+RR7oYL/caWZksUD44bn7o6O5rKlyHeaifb4pRLjyZawg -EYvslYtAgsSFEFFDnAObkA4VlpVpfWzPlK7XTUsiMEpLMPv64adYGsvT9JXlPTKw -v1JOWdVz5hqSAqWMVmXKDvwwVnetVhLsSS4bGBDaIKXFgvCiBbNraURnN6/nR6Ve -bIJvsAzcTJ/ln7u+D8LRiD3gjbWLT10wufofi50tyLMI7/0d7dUrtKgDw1XC7+/x -pNnsKRreRCVqQ/dt+fXMcFBhYJ/1e/SIxN7F4bYJKPfs7yuSqdmnO5mdj/hBZMW0 -YoDVjsBTm8EHkGWf2CoAWYJlusIuiF0YjkLbBxFTdlyZS0EMm2HFrioBYSHspWH/ -sA6x/1pxOoIxHO0EEJdaoU9syOwnUS5D3pnfZyVtxIm1gquAx3BXPIyuXXgBFhdZ -GSnOLzLlQMmCRKdlP284GIRg4rJ3s2kAmGS1L0JXhDzNupysiO+X++ztyXPtYjnf -Xmxu2KqDN1a7JS2bKRGwruPdXqtxmLik+alPQ5UDpuG2QR6FFuW4nOfC3R8gNolh -ZsMnr6dtX2vsM5nfb4p5DpE3ZOIMz/PS30pXC3179pw+NYpTuFqoZMyINAVddqWK -VAVkvH49dL++OFERPXOTp6Wa1t5Mo1sHySMx2kftWv5EHWzw2Zs2c5W9yfT1hsLm -uvpdu38IM3niYKueLpGoavUNYDmNivo6C1eca8I9HxZ8TDhk/jqQD0j9lkTg0Wp6 -CG1LNc3QEerwX8kE9yrNefq0d+1MCTsZ3Hb79pLpVYHZBSemGz8BP4OzgA5rrq97 -t5RUPz7E9vkzGLCdb4p9ln/JYY4M2sXpK5lal/L1enFfPb+2Mk5sRmHEwTnO+VSv -PTSf3DH1lIfWnbU6WeQZSohHQFasqsKRIUYWUaucQVEGMEm56bnsrciumwLJhkNd -JSGtWv+zh8H0Qrg61ehxBUM32t923ZX+TKLkO1dHoofvVpmKQYYMCtjQfuqqlIVF -Uuc5RQzJgs/cWYbBseZMMKuGC7KCwlqwPnwHqoT31LTvAtbhxDHAXKukpzfpPaBu -BWKuZwUCuo+KzaInaaABOelUjdZQJt6zVN5/OjeKG/fEqJgAEvSLjOLzAmDtA3tx -6nNTgfewTqUaL3iWhsz2C0Kkg2pHK75djwA19sSv5M6ehe8odlTo36H+JE7GAyH0 -W7MGXLgjTTnfXk7eZcdk9CaLvl3zZmzMs+9MD8rYI9RGWVb43L3l/56QRB1WL6rP -k9ntQFX4lgnDBukyYBu3H+8byZSwoSspDiybfSBVmU2F7uadTHOQAtP4aMp9y6le -g6Eyc4n7nGRaG8kOJ/pYKcXMDqACT6N6fJaoUx4v+U/6/dyFGVe4cVDVlnGW65Yc -NQT1GrsxzUmKgSnXJyS0YQ/zWrOzt4DUybQ3Gzc0ey1yX79UVD5D6IuvpSbfzFKF -P+8OAtICNXOsPMvYJDDgACnOhs7d9DKeo9gV/ALY+i7CmlcrESG9a4BbIjMhpU8A -HfNP9KHatNzc2ja7RqHvXZL5bqLU8PmUgU79SY0t2eZaTB6ZYd2/F0iVBtPJ40S8 -O6aFtk1hKun1+PD5GBMkhCvGAl+OV3vWA8TJwfr6K3KZgMoDHiZvDe7WTuE9OdGQ -jtkwbpDYmmQilANNli5xiTQdK/B9SDYOTyhYzdNmRg2EF1BqPydl8cMDonGAz5GD -/oDr42nc+KfcIUvWFAC2WqgYn+CABXINY4CuoL1T9a3IDkgyP2OvA+Il7st34GH4 
-aBRhYHPU6+d1HtmlWlZ1vdMJ0a9r6dV/0od3lfnn3JjA2saW/3aO3KRLEhdSvkBS -GQAnRaHjOka1maSxungIdLFAUddk++fPPLbnHVV7xSK63YueI0EVfxw42vP9gLxY -9K7QerZyq67vWs/qY+C1P8BylSv6aZgloRX0Kk+X9bV09t5Mo2c7Tr69Yv6hVJti -5bIdc8CiTt6nBIKaz7RYa743r/fAggEIpFAU6uUulnl7iqMOTHsNTcNTZyz1dbbf -NcC0F4TuLFNAaToLDjg3oQRni8LXG8wDMUcnC8ddbKtLTE2mVNg3QOnshZ37BSwy -JbDsJ0wNcS32XhEX7N7fl6vH8jRZJLsEUwyZWev5vB/BcxIN5iAV7Z2WWyWlB1rn -Vg4JqQaXc8jqhRWW3RRfkvblaDTmKgAPsthiJTTc42B8p8IXkZtGw1Io8cEd4w+7 -GirHyzU66+c6mz6/LRIc93OvKEGJDSbhhFsMQsNDwUPddqNKuUaMECIUbtiy3BqM -Z30Ilaqt4hGRXCmo164F/MEQKfZBUNz2JOCEOfsD6fhAmW+g5mTk0lo/79KI8zBk -ny17zbdUBdezdTrN6mOnal5nNeXvLxB+6xypEvz9sLEO8VgO7WxvR7AalBydhNNx -xzelrTnHqm2esJp6/MAg8zT8UG6h115etI3wP/8ptwLD2fPLDlmiufWjJeaxZZXM -b9tY36ehaWgyk+9M1y+5RbKOCW89xlXlY1FOuRUGQbPN/44sZiJV2kJQ27FQPyvN -ykGkVfibL2WBSSzP6UhlL43miPglpMA6CG8ygrez54q3J5p+cFcCwzHbxrv0o30P -2PLzbKlmctsvXaRCd81/tmuACJOUIduzc/NqYgaPP49sovre4yjL/TVD5RuGEZeI -JFWvc3/xnO/srDAqcQ13lCh6zqXtCpllytHMLm6Qrdd12igwd3E92pwJxIrSOmBy -TkIvebk6cbywRpeetgnA6JDeNnWtE+uIMdvvF5t90RXikKdalJK/hY7oh0byXTCu -U1EDAcaHMqyGJaSq7mZdJ6lt02QUDmdHFfUVSRuu1hIxCff7btNY0Ug7hDmswBZK -i8ukwQv3nELoizwhM3ZYIh2BAVHoncNLcorCtz882uJXtZbMSC7RiZ3CgnCjiarz -xjDibUjb68AHOkS8HguEtCB7SAeUFu/wB+5GrQIg1zVquAieHqUzXfWelwFZhO+M -5G19qomUilZ/3NtRvzxpt49oiqmyshyjt/AYLQDIZmMTFxTR/8ydm0E5Bn39JSsa -lXCmK0Lo11i1lhV/lDkVC2bNXo1KQlm4v6A3JTC0Wo0iJD0gnIyZ7YLz340FZV04 -s8rSOQBRuN1AOi0SbmGMU30y8fuWpWF2gqenIeCDUL7KrUDr+/jTRYl3CgogG/wc -rrdSyH3whxj2/fTPvOsy1cHtiy8u5ffv/cMFJuFT3SVPjJRgjnl3zchMMLEpjYkJ -AufAQ5gLmM8s9GGlFOPQs9ha3/j4lepahkAXNkqAAetBpMst7HRfxcW8S2NT+1Fs -rmFSGvpbzMkMkd6aFKUhCirKwJRiYNB+kZBInDUJJSlVBpBwgbGo5jb1oIPyZsY0 -f/laKLgtyP6AkLBPyY3lgTmJBAfC2hEQOLNJlMO//soYPkV64pvb8h9lQXV3Bn8Z -rKkMYk3SiXEmGzoFb5hRXqHJagXf/QfAbP1mhCtl5SMJ+bFxR5OdKNI5Sovl0q1p -NEdLJEElZ3jnIjHAoj4xRmFdwKa3ajFTmtlPw8v71M9z0rKnmtMynvVRfSb5M8xF -q98EsPzl2lCtGnpzchLNKEyuwtiqDJOlRE7SC1ls25BAJhu/LiEfcOsE53bowkp/ -wa9Y6A5HLQ+0+/sBfJy3fB+ufapFKGD9RKesSxJCNUC31v0vaCQHUmLyprr0Ftvd 
-E3p7drG0Vo1XbfZuFWnFEtAjPSHfpnXOLWjz27IgjlsDt+JxX+IIe/XRp+Iyl+SJ -I51Azn2KhFNnJ15RXSMU+kVAYc+5AUMlcsKgRMrF3CI5QMEekCByodV8RYbLzOjJ -YV5CptYwGdp8/x7zgBDmCOz88T+Zkr5S1iHdeh0PEUMEHOtUscpB/yuC+k77QrBH -YRdWhrM9vLwV8AvyLA0W5Dx+hHx+RDJXCN3RASS/t0H1bgiUzHTB9gg6YwuNQK8S -bY0bIqM6KlJ2CxN22KBWcN6eJM0E5ljmtZo2ZD09SQ4cYnWjp3ckrY6lqpfiIrSB -vIJVCSV0mPUgDlNdid0aOhl33DiJPT+6vmaXxwvADHym/ag4i0T3fnG1SkUWZaQj -auLTkNodiNNOaFYZqz3frCdFbtkCQEY8Nbgt75WSHRO/tI6/VuV8QPZTHzevlyQl -Lq+Smlt0a+JGLKpkViiQwv4xctaywcl+juTsQSfPih8owVCjzMMbOdl6mo74lSHT -cmerKgp5gtDac500g9PgB4hCWYNsdJawFwWnCa2MrSwWa5NOarr0KFSULb0BmBhv -b7e0Dxfc+b+2qRZBq8Pr61j0dt+x4MMMPjXS3HfvWVpqK8MzRc9fcGKOelVNLuAy -Y7eVkuPrygQUqQX6tVYBrBREnbeU5+xSrxvPbW9whAN6kvpM1Z9dpLKnY0XJ6Bu4 -mm9z3PV9ZlZWCjozxpDpc3Cvpod9RE60E6KhshpXPzbam5TiFT+YCBOTA5AvIzip -QRGYcCzvMgjTXajPdm7pHgAvC2PScuj84SvdggxCp37RAwkQ0eOIz9WedeAclkY3 -mC1vt4Px6Xg07i0tIRtzpi9oQM2bTnHcPU846eNh86dEhFu2WwHdCI+8LK8FrbAz -eeq1fIFO0UyAXSGXm0kgkOEo+LUPzpqSwXyvM63uMww7oPUhaNwjH6HVOKKdlX2p -KVm663fahVOLUJ2Oc/ehjj7J18Q9lgT/5S2Z8JMom9WaDXNx4e0OAT95flSbF4IT -lSKRkscQ1G8mnwNbS12r+RH490RQo0aJ/NzLEx4W4rJe49G+jDb2obpGKQ7/ZrWa -b0zu0f/sFpO+xkvRgJ9V80kCVzzSNc8UyktkUp0ZeaNvxVMG++R30bC0//M3RFkR -ky2B7Hw2TRifYV9Uxe46T6Ik/TvtGY8mn0NX1PsK02SG+6GqAlOAqWpbPUaKMMxg -3A95jU+bSiEeBTEeb4/Ydm6tXqUeW5IkT4RMvr+sleMOshmzcY6aINkRN4mApYpG -a4HEqU3+uaClPEyyXEkV00Qb3BK+jVOEX/9qBP/f4zovhaBnMLupBnzhorgyMQWM -vxNcyENv5B8yidXf/qUGDb3cAgOVKaSEE9knl56xSmJ8tH7GPFXHYRhnQYtnQmsa -r099VD4Jgg5075n+/y+rFnt4fHFYh1eF/qg5PkZcSpkc5d8a6fePgad25TjPPbtv -q0QLgzJOPeBJZ18emPQTVlh8SwJ2mcaF3RlqCZ3QxFPPpX9zzyhOSiTIGWoMrT6B -HHNO4oSXFYK9JhQgqRjMfdyyYJAb/ITxBX/juzyqPFiCPvLqLn31XfmvN+L3/iSq -xPB+p6oZwsGxrcZnkemkAOmECo93bu3t7VomHMLaj2RGTtVzB9slgjaTorNm3w0N -+N6P/kgDknJTH6x7UtR0jiqcE8xIi00B2fOwWfmSYnbULQLboqmLH4PDcE6Fev4P -noaWOrDtz197g3D3Hwa8XuMI6ZhKE3HJa0F+B6ctPlS3uhyZWZJwNjm7BYU4cCAR -lwRrv82AG8Tsmv/3kS0jpJfWxiWnupsHB+mLUIVBvP3kNprafQUWrir2t7B5al42 -vymL9UYcWNIJkYgtaPmWgYyKGXzbriJi8RiCKjCTkP7jC1DdENXLAeWtXXqTmMyY 
-uqnBHv3TZd3ytZT8sUpoGI5JLQQZO6JwyRUSi8J3qp158/x6C7w4wP8IE9LWeHaV -JbhnR5Mhw7kRarAbd//I1JMrZTLOZJI1dkYROyo/LA62aUiKjALlP7EGgdbIoYFJ -+aQHCn1j6Bl54R2J6dcGe5hxIlfx2gnihCH5LZUNPlZojtvxOlcsTT0gK/jRmtm4 -RM7/0maZO1rMiXOjwSPax3BYpw4mvqk9rcY+pDatkHzznTKemvt29fY7EB+MnOSp -lFTzs6oUsyvEDc0f2RRinoYG8IkwiXs1ZJ+OR+auVVFtdPGYWskmeYiIUwYeVtnA -MsTd0jKLc745zpFlT7njRTNobVFBsKfX+zUVStFiR3xMP0xrjbweCE6yZl7FqKOh -JsAUDsjs2oq1OgvHB97y/egW+wMNCDJYuUL6AT3/HMIw8BiZgcZ/yxkTedNxavIu -U0ejzcyBQErrVVYqwQiHVI/EuUcV315ZdAVHrWH1CHoQcnTnrXSoyNm2IFLCtk+T -nP9UT8pypoiAralBGAu/OQ7TKtHjCSJvdmEf55myHS8wwgaFQqWrR1PJjc6OJrGb -8pydkmKHshrsuMk6Ww1HJgjVjA4qSfk8CANNQgV53JaXIc39uxfuzTkxyVyFp/xU -sHWKq5i2mgpIUYnbIdrutbyq0pfBcNUqswEQWa6USsN05fOfLfuJXQTaaPfaDJQF -RUrRyJ1eEYV3Zt4ulhMUlZzYZU9uU3X2Suk1vsUgxCSOJzKKD9Eo1NSSr+gCzdqD -PB8UiKyuc/Q0WQwcWTH0Y6iuylnVQywOvXDWXOiNaznMMnPE5b1Yp4t6+3BlOUr6 -5GombF8w2C4rYl6EiceWXvZ0PHSpxacLG8xgA0R4Cm1gAStlwbgX7+CrFRy6KV/L -CkuD12XZo6iW8HduVA7tk2lTB9RFMDEfomiwqXWOGkJgJLanjz6RNmV+gZp63pB3 -tNDQ5AiIDoGFD/It5E2lhGdTubXXi3bmnUKfcU9zUN5zIIZ8MJTC6KWHeL8JH1Uk -KGhzotqeFH46uzuWOU6LqKoGProk6QxEgewkvgY0wUCFxfc4iSN7hQSFQ5UCBSsk -BbMqFxawc1Q6U9RMGfi1tDXURmsyJW2az2fkdkTKS/uWkUGKgVabbs4RA+lYv9Mi -Q2sfhl8lHKtMMFoBM6H1Ias/Hr+WpCiy+wqzsALdhxfNk/4GMu4g3LO7y5IR8cY0 -2K40cNXC3LA3qXRPvZ3yd0OwXpOpMtryGQqhNtWm2BPe+2o8KeuohXPI7MCeeKiv -CPmbrrhQJJtE/wF8JqqVv5pR9bpISaUq+QxlCA6sRS7VoOm8oGZ4R9AO0Yk+P/cC -1dJZYc/VLqggIOu9DBXb6SMm22ArHCQ+OtryU0pyEpwQcbG/JvhGvsGz+ztY2PZo -PTRaWh+Wj2OJJl0jpBYjqAo7Uy9QSx4oX6q8R4mqXTSeKARLOSMw6ccdRCG2QdCW -FlmmeRq7YWtCCOLXbKJVE2AlaPBKG48E8DmP2ndADOlIYfWnFrJW5yJ2dP6zXQgT -dfOLBiJshxHzECBUvyX7c824atznnc+jiGQL4zThymOCYK4XXT7wDdc0PgIly6hx -R1N8VAH2dor1sSTXexR786sL9BEgr5QLcc11xp16fQu9+HSafsEn+Qupw5nekA+h -rv1dUfuuWQ5cjlfPk0xmw1pQctYfqoVw2/4P6nrzxKj7uZLfXqlTlmUCLr+YEbxI -2iotSilWBmyIHV0MA89yWBSM1cLxiPEuACtLuqeh1r2sya02rCdn27ZJcB7YdcPF -fskVugcZRwQvwfQPSqzKExSqhp/frIgK1Nq7dThGQjQ1KhNoZP+EZRrobPKI60DY -ol5Ihr/FhKSQfhODcX7d+yYXD5V8rGtkBfr78QFsUny2zR9GpSPQIwPInFzO8Vhd 
-GmDDfGjHDXYektRcuBe9DPRKVUjrpgsOXX/kzju3OEjb+ZfEL9eyDMnBTL1ELCzP -i6QnRRB10igRgkVdK6CVPY8fFOyR6FHI2aIriALZClm6OlMrEcCPxFfolGLrlIR4 -S0nYhAAd2wbT1mEot+LJ5MaKhXgFvvXCW3j1AU0cP7mebvW/LqN+f7VhB/4M9bj3 -Gsxj3CA3UwZMA+/Ufo3NzhHIIgBywDK38An9uqtQcNWwLxaax5FzmKAGBFT9XfnY -zAazI19eqH7FezzF7/nkNA395PS/+Y/gnlavEjst2zma2Wa603vgx72DdfF67y4v -4tI69ahFtNdVoHlFXfFMwMEwgn9AaK1mfiLwE1qz+CaGN7mRjlN4E42v3JEnfz9p -iKJX6cj3sM9VNEflKEbGtA0jFH5MiOXtoq7+yqQJTzLtTeGjSsaJg7DyL+mBI3ZH -Ir9lhxwuklwqrzHTfwv20ORRXB+vqqEztNG0wqh31Wtt0z+R5s8T+h27uEv3Ttoz -iH14MJbRxEZvYHM71NJchr53fyitdM7rRf7nLYz+mNjTyq7y2ziQwqWxkkKZmrK/ -lN7ZV7dPHnYxG1Dh1tgOeen3eKAL2iiz+hOyp8SsX+HAbGPgB/xT8OCqvASIuHA4 -BYVXgUpYWN9xiHhNp4XFfwSxjWEaH8tLkBDMDUl99UD44aw2evQ2k5Oy5HcgCal0 -d1D+sbplj9Au7vxmK8tzX/IsIeC4lXd+pMTloSGosD1GfSbetfIOMvyfON3Hw8Xo -kvGdIEcklKMwdlmv/wJ43WHPk3Z6i1uwQ4D0KOHdF19R4p0gJd7+RIP9L68UGdPj -Kap49lKaoVotry9GPnkIdeGR4YLJ1X9jw4PunsfnjHRC9tTIAKWve546gMFXtZvy -JnjDogx5ZCSyEAkHnTzGG6gDucDwGmRtgHLfeKBFRDqZYpaPGUPneAL79ypbkcBp -bFMeiZuuz41ugekZHSvdgAkiIUksEFAONTjeXmUj8oIhq88lUpKKYSUhE4Q65HiK -0o/wL6NnKZR6YtMgpHC88HMrb/u9c3CP+UuDYaYpuedeoMIJtOrv4t8POWtTRpQL -jk8GwpcRW3Q3hCtmc9ZLpG5TxkwZHr0pBX5P27I5RGEkQwLIQ+QWadUXnIJqrLen -fgBd4XYF+INXQJWMG6hkYMDs1pafHB+4M20l20RoGAqZF8Zd/GXnYdZlVsQ0zBzT -7JV1zUJU/EJetj3rTTBvsqIA9/MtqdPuA95Bv37wjJG9IhrwB++mpV//Zn4dsFm/ -EN0KFKROI5812UGUyLFAEVK4dxExY4L+No3XZuaiuc7eLifwjTK/CIAN/JQd6yd3 -qtSJXzX1ulLpHJsG9SCvwR+qk4m5vRMhPk+3srweiksGlj5QKTHFwWea4vDagMng -IbyJBC9/BADQHbarQm8bNNCsfkm+K2AsJK6mXVrbFtZsnVt/xw38XNgG/weY6wwO -tK65MMAHdt7uTOPbFOhgYeqRabk1q/uVPdFg+XhpVT9TJSXqVEKLPrpp/viaWjAG -AmfTo3NT2YV/t+2cklVZM+RXZato5QA0RrhBGM/ZGK7Q6w8veQ+e33CzMhrO6vue -EbecGhOs6EjZwVWYeLr1iLH1kNZ714WuPgubl1jzU19qGjVvMST9tPsYg622zwPW -MWb1fGCLcol1FrYwKbb2TSsFrpK/64hZswJIhg0w+rjdyH0MOZpXtyA318FpjRIT -kN8bQZfeaqi/fyBmTMjqUKJWhB9sZj9wyvep6sOj+KMYUfRmLliI/VLcWUakAr+L -88QWpNuVGp08mQfzVrd1BwlJYXUda4ijGWpIjAzDaeeNagwGSrGgoVrVBMNdJmZd -/b2Ipuh7BshM6s4Dt8Ni4T4lVEl2dRnEEIFgnhoSspPZ8q2GrVpq++FKqq8RHKbM 
-py2UBRRNHGL4WOSVDHMmnOmoaVU/N9qehUZ8JrzpMOEf+lMKk/BwRdbWeRA86PX5 -AxpQ1tuwySzUVZdKTlg022tqZEMHBzeXigr8JeiE74eFt27qBvMELY0MZ0+XYR3a -W7s8Vz5bEn7B+r2JeyGYF9J9MQCUPPe8ukHFLbQPp6Zfg3SwDYbXgxN1YVK6dEzf -esaqWeyilBuDx1xDYkuOqiWC17i3NCFfo0Y7oltCciwcyYQBDZe1AKbH2OUkTeCG -UBvDsrkPdvbOQFDSJNGhij3CMApgzVv0cPQ/ElnVRVlqFDxldENpS5TRVx/vzkIN -OkVxcTqIKZ45cZZUj1Lt/uEtXEWbQPkAZjtdBMWUuj08tqnXytaTRVzUxnU2lYIr -rRheU2gjduV9jf7thI81SC9Lje2D47U342h5XO47U4Ao0UmnQXbDNNR9eRa/94Hp -ZMeOhbcI2ukkbPq2O8UjgyRBX5u086RQNa2B+kzvtDO3sZSuHYZqVlKdGgPxcQXv -sXhBFx2MG+xt5lDeuHqPtqfW8yoayshJ38YruYhRW2SKg8zDw3x6v3rWCK5ESvWf -fqVevQscwoaWzW6aTpHSjA/VsajWPmQNfm9CjRt6ncv08XXpSH1o10Dhzq4QwGZb -66dxWR09Sezs7GQunB6Qvl6rOH9SUDVurhsV3BAy4iTllbZR5QCsWKKkPhhuKMuH -TahkgpZDvvOH8gwAvOhcCcIpPRw6fkkBuuZh0PtCVLOcnWmflQMgoWpZKn+g3bTX -/TEa5pyve/4n4+RMJiVlc3jPk4DwpecQkaz/58euzaNzFdQ4eE6twx/+MuTL7Y+M -75PoFCDjpXtQXsDwNh0UNBYMhhiSbJ+JNHmgSGE7Mix6VvlQ8OqxkBATuvRD8W6x -QJopmjVf6lVUrf7Gw0ULqUmnkESN6D1LGdP44GT6iuTGtZjWozF7lp+Edf3GqMKT -DvBxCqX2/ceFtoc6+dD0+PGS1XBw4s0Bu2W2AqmRkqwD7l/CYDNMs2iRCIMbrSlg -+6/VVXmf1xJBSaeIZ0Ure8EIAK8TX7qMD52V1K9O/mG7S7P/94RChKJxRqtyumgf -soXKoLKjR+AS0WtDJKSGKhgK7zZDv9Nvrj+ex6rbFIagXn+kJgKVrNYpYdhegTjj -4MoVjbgS4Q8n1PKaFbwnAfuSBeUF91NUGzemKhnm6jk6r4yhRTlKlNs+v3gJHbYo -5670UO/2I6ea1VAVSDpqeoreo6YlT/N+enlj0Jz8jttLQWvhiTF7pd8bmqkgfCuH -5rYWKvcsdcb9zX2+odFDnTdYgdmTLfGzaNVmhmdLsjOFhstlDnynKmxnNTWrEuel -dSWir+9aRmof50opwpsDb/mVxbNxKbQBdgSgsLy4OItqwDmKQ5vjw2rT2v4sMWhM -cz6aAY3aICNhpyY2q48KolII3jS9vWfWb7GoBv4KQMOD7qMbYAJ7E2mHfEFtxSXQ -rAZ+Zn6yUjvdb2RNkJjj1qtCIMXs1bQLINLYTIyVa3mmIzosnfJB8KDIrDBMSclT -LPzb9Luv3I6zo1fRx6Ny/QOSomZFtkPgckUpjWMB8FYrUjUE7JWjhjq9qWAZX7Ts -EvDamDla6S2P3ocTU4nv8nDa/Yf+xwqBBNVRtZHsDg5SQTxsT+bpnTpG+9nOTfX+ -oXvXjHMMYPhkqZqBNdMbH4updAl8OAaGcnw70HBnS1lmsvpHNj4ct7v8ezGgcgoL -+gsFcQ/fmr0/RvR6vCaQTAjQpLl49e46Q99UbGVEviXvD3fDFO3UPlDbiYGAVsgB -2yBLLc4tRbSmf1zTVSdERXDSMYpp6B0GxWvxyjI8UcGvOzSb28bdYXFdKxvMR+Rd -kNyTXGcU7ZfkhMcjWJeHZj1vcopgZXvPFAAXQtnLhGrYUTXZJL5aCH+VkU3Luk6u 
-xQ81LyrcV4cTXrzisGk1UsS+ARVAttPJGeMp+fyKKb9vnrWEV7vX7QFABTACrZbJ -CpU4f1iGuUcM2QzYsZpINX4z9wztbKQpDcj0XtHs5GfovCDEfB7fzcRhkgapq99B -mmYffR6mcOzAj/i3eO/1aBRvorcNhLTeAKyxO2Ls7f0VvLEfsriLvfhxLFoA+Bfj -doS/Q822tIN5rnXoCUj4+THoe018V+l7nDrLunRm3zIk/0j6KCFOMCcEWGe7YTju -6XJ2qX70obQEaOMGd2zKyfs3No3EFqLB6qL4b2shX432xMSUpZdJ0QcjUP0gMVrG -35NeKx/maSjwnqbMlEv2irQvjktukjS9bJfkOI0M0FjQ6Bhl1MmbMVgqod9k4aFI -IgcYhO4L7B1QUAo6HEPhWToco3WT7cne8YJ4RrfTPjMbVL1nmAfI9IYOLHR6aKGp -QohZ7oSHJnPfHBm5/cv4PRuB8olcZkQcvoSd2yyFgzBtFx87vYogmQ9jDNFqyZx0 -hfswYoGcjM0uX0wwL8i0kOLuQ5tDa44uZk6VEFIYf/jjU9EdLL9ah0h5C4PYy55V -707GvDNj3wY8XqqVu9Q3lbMelIW7Io/hDqg8OtaFEysfSEQizhBLMl94hU00lfe+ -I7gJz3MomlgA47b7g2QtMBnt45AgmPM2ziCbcXyNVrh0Omc93kyAR2BtHkXhDW3h -A3VWfpP+e9B0l2GLltBEFvQ+SzkWioDD5Wbt99g1y1R8pT1iNuYMnM1YLyMCNb2C -iUSHB0jwGFmmz4hsVeQRoxfdoQLY3RyZ7N6X1pdHkMRMtGzYQ80RD2R3JpBNieXH -Ak+Y4cb8LDgQGd1YOxPvuwHCUJg7+7aoNuF75M3J+Uhonj2TMVpkWssYklI6u/ap -ApaFVs+c7ck56H8S/ohGhNYsoBxoer4gJz980M5qyAYEC9xQg3RjjbBdlFBbrKY5 -IrrVQiKd8q60wo9cPj+NCLQY1O89UEjqUDo1xKyU7MniGzC0TtByWaD1byydYFws -vhD76bIYihX0HRtNA7W0OeqzEanv7TOMiwlJgo6UKsj9QsvsY6TfO5nIRSbw8sFF -fDqimCNuv9snpA9oJ8CbpUmScrpi0lTn8qG27BL1kMc5/z4/1AU+wESQ4s21nRuq -k0tC6A0+xfViPt5bh//jfn0y/TwHZHWwPbtjDmOE0z5JiFoF7/w4eNI8IL96p2IR -fjQldRPZ/VjQKZSpJa5NnNCfdPmc3y4sv904RPld3m72sTQ4Yql+XSf7oNCSU+Jj -iZif0xhZQ1JQq3qfM8Y7Vm++1Jl1wpqSUpehyN4NuBzq4pjtfmLOqyQPbw4GgyPc -J60EBy/JIuV6qtE23BQY6zEf6V3ZKlw4ZOuoYnWaHNMq9mXD/aOEYX2M49vpYiof -oT1aFA7v9XuXJy4nz4WW+FEx/JwbI7VT1o52SQeT5ndTOzC7w/IJeUmhkZ7/Nk0p -fXL+LLpPktIRYtfwrCJLTstiIsP45Q9nwJT+rQZ5ToBEP1zKgUgCmhsOOuxXgTdv -XWIqPZq3VTrSTSM4LD0YkJL+oDDwP4lbF/qmMfDsgvoi+p0WFQ7OlsQkvB/UeLmC -CgPBOQmZSQ290SXxEjYCQ7B6PKv6/ItureWob44JgEuUEpIb0NmQYqNTOcl8xKJ1 -xdMsDjYOOOCmlCBuWxVKK5d9S0vYmdPNnrd3W8UANO2uconxTRTxgxDNlGsmlCP4 -qdOJ+EwbazGO53ntEzgVzRT18dsPHlIWt5CNbLI2UUUM0Msycdlvtik0bZOQS0w/ -LkyF2+Dth2PC+h4/zcpfcl27nPtaHqlkG/WMlPC+BOM5yxo17cQn4ZvMt2kRrzGu -igcQCImwly5LjwDNhmO9kOvbq+mAJMTuWL+RS5hZx6IEI6iOAo+kirQJ/WPwEtmc 
-wCQnjBAwz+HEskof//eHnFpLmPecFcBwJ3sM93NRkyrRnebaItkwzocwh2s2ayQc -Z2Y+/wgu7t8RbvTekopRUNd0JCweA8QqqURzpbwkArcuX+p2Aw4BB6/LrzmYQPiF -kdznNuwuFUArvB43XVrdb9eCPNRZBUjbNqV9uRBqNvgDeNKgX4JHVgyI7SVb052a -KsrdXBoj8HHb1+XF5Nrw3TndbIFnm9UVDCfnqpRq7pbtHp/i2PH38WekImPSnbk9 -9Tt4g5/4dbpsx7Nv3TuxC0+xcKmocD9arveYWh+Y9MNjzMgMxYumGI1+ft3DLkhl -OEDwD4puesOD6yOr6trtUzi08wSmzTFoN8Q9HpkYc1ToyPkPWhN1OaxhYAKLWKvp -KP/E3Q59MMDLcN9e79czudpsjrH4qvv1rOStdp4lz4807xvXmL+2z3GYIezTIl6o -zD1U7b4+7ZHd+u53G9OLSPqekdWq5ccPuasGbbzGX60tLgmd6HBHK4p+dAOUCR8n -nNCEvFpG4Irg4wvw8TwBluBCnEY325rUYVffLWyFBwOK31LFXaqq76iOz4iYf38f -RheRmg1qdGjhfiAtHFirpVBslSufXept2BRHwgvsJssheh2xk5+sL++4cf0MO4Th -rLOgMCzybmLZOnRE/9740c+TAgG8irFsKrUfdWQAEy7S0WzDkT+CjKaQRfx8gi1z -ypB8Oaj6GjyHFgPBC7uL9QCo5jZ4/pLO9ANmuU1pYyfufzvcxSLX8431ndDFRVUW -iM8yeQEn43LoeA28UvAA2q08KAISzzNq0/EBayFSJydt4eiE/aWX1Ij8qigmbXZn -gTIaiL+p46/NbQkNS/EBL/V5xaFCMFM0qu/2TAiJgepoxhGz30GSU+ZSIE3XN41d -CLEAyqQEEA8JCMbwLqA6bACbeGoZVVuHMhg8HyqQSne0NsDrVbP4wVm/CQRt5u0M -4VXuiQK4BI7FttXr8cNAGcFxVTrcjFPCw/SdjVmkY9fZS0hb7ZAc/rqszOhfYw+Z -ANsKaqzFnArE3bzL4l8LRWmy5xOaFxLVuVEOLk5lsi6Hq2MafkSFqGMWAT3AOlYZ -B4Qgg8xxBGM1XUVGgakEK8vLRvtiOn9/mD4ToONYJ464NpnFDKvdvzAZOhHQY+KB -509U6ifwn1AxN648HWhKz2xibG4HSXbAWGDBKw9Uzo0yQ447jQy0Bd5D/ivmYV7J -Yc6qvzSEI8HCr4DYQvSR59HMITlj0RMuFpeAxe3Ngq91paFAOHhZMAsiZ9zigfdZ -6hoMqOujYGmzm7TbtUorQUok7quUFPhPy3A8+O29lakJ63nNqTj2oVMxh9E2i2ue -oMC4QdVuirxWJJDcHofyimlXqtK6TruqeuUew6XNjX0F7o0HGJ6fXxLSh7OfiTH8 -NvvFggzHxo076UVJNxh/fZH7X9gGmyqeeGOIBSoy+30OFW0CjpuN0R26JuSouht/ -nEO+AopDP5SIImFpfJsXGH3qf1Gk8EMWLcQMT81IXcbZeAG/SWF7HX6KCbluLYqS -J+GhkUVVJCiKWG5oJAaEUitmqCjS6y6CoZrctM0tLIjVwGfa9Tn+ohq45xbZ9tMu -pPdubPp+dqMmioqBU2FRzwiRPOCVSo8H7hYVQk0Eg/anBZlI9Qu07i63REK7WICx -q1JXUpbeEsa/tIB8tcmLydqTH0iUFHwsQFeC5rgwnnzcOlPvGTMD+BxZuL1ghsT2 -8rxcrwFy0/N77jAkv8iwqrzUj4AmGMl7kX3DWkn3VhueVtlNr43FiALwu1hWE6Q6 -w/97JArGodemFPyH13MyY5L2rIB7rBR/2CSyXZkBmOf/hxFAS9/OPTadOk5TjcCs -uliQgWEfqy3RC7HtIoNkRVDZ6neO2D/Zu8ZltAy8m5Iv3ZQmsFbzosuFQ4z4wpLn 
-gGy9L42pnaFrZTACTk3Yr9MU3eTHaLDmMmuyCdEFXmUm8ReJm3sIAAH5dqUgdSMw -Mr+QtdZsexWm37jKSkNVZ7LvFuBaXLUfDt1x3SPOnLxAi356Wx5rJpZ63WoaNgXJ -vzXoQ2mxWUduAzbbh3t4/4n3bsiA9q/RDuMsHuZgwvzgGGSuIHESUl5fybcKp8eb -WoCSk4Fnvw/OhtOpnTMejHD7Z+g72w1u1WzZkKLYKARAje2x5kmm+v+hw86JxvbJ -lONga18B79yGyNiaosA+62vukWI/eDc0/QYAV1jTONon7IpUpZIUTJqkccXYPR0w -jdl9QNBMLoY= +U2FsdGVkX1/dQmXuudOK9uJaqWJeTP647AagRJezWwFRQK4EkCJEBSRNfmQQgjKc +DFfnSa9YpJ4zSe+ecwfWZaqZUryyHC50YoGjDfqcRNmJ7Jw7vKLGqsM/KidA7IPx +6Kt1r14wwKAbZ3VadYWLnKk+CwGEMHq65wLSrP7GMVSI59+02/WpJuEjjfAXFb29 +t78+d5GTXzNDK6VgjAB908YIMfgmSeVfAP4IeqH9PaqoP0ExBYSgV/TiQa3L3nQ+ +Js0EwUsfroEk6/t9CfbJl6ZGLNPPZJlIyAAHmd7B+MKgb6b4YsBlc1R1GgghjZp3 +d8HBAahrGwSwup4f8nJMGZkDHUFvOSfQ0lp147zhpG6DkxtOzluwJZTI9hQarwR1 +o4V//OvnYxBbXybrbh3fyDt3/r1x0f+RMjQ6nUyDIQgwNWhoqsvPoQutCes63OG7 +AwRVYbjKq7pMKPWn7KOttgBJH1Bqka5TPZ9sEecSkB+wz7GvQgUrbpBYCDPcR+TS +a2WBArd+OzSYJHCj451kobTzATKPwLS2Tw0N93YG9zZ0738dydcIV7WQxQC/vtQ/ +OGQ6/ttLptEffWkICNkWpRLUsdB2Ih8HW7i9q35ynYmxDollbi7dRzulJiqbGre8 +yUL/a7qLgSD0cwIR3tZeV67YxCKeMP03fWEhrqkQwDH3tK+fYe/YeSskbqvlW4C6 +450ekOKq/9XrtenW5UZSesulndB3eqai1RdfDEr42MtWrYAV4cG7aPpy1E8JQkbs +xo3urfYeUDl7hpe+i507fLkoio1T24E1LpL1ubnw9YVicqR6kqGvuf6PfoeOHX3I +mlc7L4X5wfOKIxY8TTPjGUiCgAWQd/AfYDyo+X4KqST2EMUgevWbbwdCwxyenRcS ++8QKwCEvkmXMyeH8NrOVlji4RJAi/NUPmdtgSgKLi0o97VClp5B/yAm4WB6mJ2DT +GZs/FN8N2z7Xw7dODphsAcKwbZjyXALRJ989+Xeh+q46UNumNjePtrpzvYvS2hYA +qk917ml6hRyPiSufCbvtDGeOI0rK4T1Rur1cSx7VknePudzfnG9wbKuJ5Q38ZlKX +dOBRCpTmtthdBG1iCJ9o1av+QTud3+/r9/4L+3y9H/HA64iaU14EUs/rAma3Vyp+ +BHfnCOuZRkohEl/BCpVB6MwqK+H6g2Axh7Z8+HgsNokBgWub2/wmdeRb15Mxt77c +iGizz55NkdJ/Qv6QSzxVbKDG1aH/zhSMo0hIZzYJZ1VSIJ/YfFzmHDpRKAc9UnKK +RoPYzJBjyJ3vDanvgvhTtnTpmvxgwUodA4sEexHw2UTP3EtClj8Pku4zJnNB41Ah +Pkf5v7ondEwaBdzKb0z9MWC01CdnnJlNfjfYvwiQJ+R/2ziyhybibvm0Lwb8zTy6 +Lk8aF84G+t2zyLVhikQWfjlQ1rIZyT9WHGYGC2DtuM7fextilmTFx9ZAEtVZPQLk +PY+xVTL6oUpu9xF22NDAdkiDT2OBErzRB8nzjSBp79fsPFnmgBTdsCUYaNRc3VK5 +4dkEkEsWSDH/g9C9gdHrzUer25LPPhHNRS31z/DT1lZaZX3zcmBIXY9Qx1clwmqt 
+CA1gGd8sOnrM45JNzPuL45ZT1qfhjHpW08y/n4e2g3Kc1A2u2Yxq9YYjpdvXzAhR +OHeR+Upht1NN0paX3T71upRlZoIIhY0/81DPippQg5NjNr9R8M5TQ/93oZ9DFH4+ +36qdcuYkqfUwAc8dT9bMFMG3pPKdvqNs+3IQ/390P/DJtnBYeRkIw8VSiGTV61fr +F8nwihZD/LKWitOSJny1MRHBe+UOgS8q9vk0NiPoPMEKZ3YJefCpOwh0/mnfDb6E +EQjec9XOQeu6iY/s53XccJFF7UQIL/zo5EPcLWkabOQPXSf5w4HeCJi24I/wv/wZ +Hd2rxa6hAAmSMbdwFjdAEd4Pqzd+mjztEaXLmxz6m6IOc5aElLAYd8Cz/5/oArPk +6R2CntX0Kd3HfPLDAuE0E1t9jV/3KgT6BV+x+bqIzIo5TCx8vE4Wy8ryxyRomk00 +FF86CZSzBKwNE2sPrE1fIVZtifn5xc7SCpxaopaN6FK4t+R1Shcio6tpMDG8/t0F +TY/oIDuxxtYIvs6C/eGs2TxbxvjOls0VdZOb00C0oPkIM58TOi815GcdK+EhxuNk +sM5YSh1zHjR868A+dHEinYLfKHPFyKTr2l97rxThl5T2i+jnbxiF7jS3p/XNCFoG +yyqJFUXTws3xPH5EaS4PEhURu7uxrJteie6bGPB0qDb1QxZLyRNaw4zdj6Uf3Olq +FAgnCbbHm/MMUEWWzZ4K7O1A3IZPwy81FfylKjs2rdZW/Nho0fxPmo6oI1VeQOB6 +M8b9anBNtBEW537L+3noJecitGdRreUijS54qrrQOAV/1C2tpIQlsF9V+JbbFV9P +9KDnYcDi7HmPydehAXHITMjhaArmuNfJ3WGBEV9T8Cjan1R9NQtmUHMEuxfCHf2M +RXfb/Rn22iNsdYmBELJJ6hF5PdKp+Wi7R2tY4auWYCpAlqp2qjy6x7miTCXrf4QV +Rz1gHWn71P1FyfTgap1M4ICqdmZKFPUIrLJqV/on9SPT/KPM5q+W9sLiXo+K7K51 +nbNvatChQ2bTe+tSFZ2d7NUTCSDXH1nHK4DtlWQ+FEVCNZrqAxbqhFCSPvAlTuz9 +eK+HkTvvm+JXGBJLdT8VHvffNBLfakkLf6OqTqLu/b5O7gB7A17mYxX0ifqFNvrB +51x4dPmhu1vs63Y8fo1vaGEOx4BrS08HZCFjP0jPWjemaffSCqaO3kDgi2AmdCxt +5M5JEbp9U05aBoD+q5KsgoTR64sHadhZx6zA+hqNMzsRCG4eKTKFchdbgpjENrWw +KzKNuv7CXDtdGLIcZW6upozo4z0WbcsTHQ7G1kH6YFXZnrxHXFHrHUMnNaBGf8MX +hvcnZb2w7m1s+gN6frHnHGwJq+l9dHXVOWwHGbiqbiWqXzZh6SrfRVFfbd6ed4aS +9E1VzfWCG4xdhRlEPAqUBvkDbDsxoaj1vXxwPwwqaKRBbrbCJ8NPuXO9b6TC0yEG +74h/DMtumEIVLXTwFWJa1au4ZPMUXaCFW5nUJnrE1hYjWdi5/ro1Cdq9eIDkWVTQ +0xZBzuXKsnNi6ysQlzTZ4GyOgiF4Tkd49nuGwgqjCPJKauCyVVBQzRxfpWYWM1tB +r6A5JWz9EMRJraXwFkcKwP+olG5/CsO6eF+A/0mwjdQlFlQX7x7meLhdpTT8wV8W +/yWqmzhSR5s9su/Gd/q4hdWBYbbiEQYUWrQiUUSLUHwyuF4bMSTHZOcL7YOjTRjt +SQO9/th8EWrs+X8qV4ElDlEEXWjLcmZ8juPfpCApkiGxmc+31W8IvvrODH/KLuJ6 +29PYVfgugaGCEgIU15xNHLgSWO7IvHHZhXb2C93ykVlabee393TeoNGpRZH7TFRA +3mZPz6OlQoApvkyrmrlSWajCLfXFlWAspy2NQAValZsaQhzRNM54DB9XV/RWoHob 
+26lrCL5iqTs3POXZdPbZHVzC8scugXCGUdVcVKEG/fnAuUl5HE5AVpxt4m/wPj1l +CMo4G7U7DrQo/+161VlnY6zELsJL/8MVeMIzoEfWe+Uw6uroNFNg04ZCdS6IacYc +rKwzPZJjCIQq0n/NLlJzLgQFrzf1Hy9NyHfRFcguUegAIcOv5rTBlO4P6OBSdtwl +3JgU4enrHHKkans9S2Vkt5CZ5smdjv1DUSG02QTEpGmYhuJGxTnUwymGpmPePDtD +o8xLwLrb3zi0ht34dGWenzBFX9QeEAFEAyA9SMrwPTRw6/xL5zIkgRnC1yhZSWz9 +eQYnQ83yQIQQ/mrOrTOBp+YW3AL54Kj3VgxsXRIyqjRMX9Z9FrnNB3b+eHbRbiEo +5al4U1wEgRfCu5mSqq3wDROE1Kgsv/0/Ju4NIwatfSxV4S7l2NfLhY4gbPcx+qMU +CKNnLldJBgl7HappKb62MMWN1xx32ORwfQ6Dnwe5utZiidfn27lNbGjZLKXGBoWj +C+/jyoDuxh3vvNuW6Zfca2kYhKcf78xG5sMdRBB56a5yDBdvjmM+TLhNRdfL0bch +varecctAk3wKeO56Hstg3xjmjF3ItdoCiQs3JgJPUcoACGsvnWILZKKCJoa7/Z/U +XweFl9703N9qk11Siq9NFiIuzHsauNkPprAQ8uamC1JV3dm7L1fNVOBbCvTkkZ0K +NOwIrx//OAXPyqlLR9SJ30LkgzvmbtftyYvpMIPqMVgq7qjmebe3YdonIzoAJPwq +ggfaub3yZNLucKwZgiOqdW280b6k8SEuDDtGYsj9v5NgwKrpaeQ2z7oZPiTUMeCJ +yNC/MQqhXC37IuKbLywRKipHrcNKpz2v3bLpmPKIztWTVGGy52+HeR8NR98LMucd +qnZsaGJyi/7ShBzPI+VR2KQU6ibIBsnjCF7hqV1wZVX+D3zSCRXsimlEncHLjDWg +9xvWNuhuyYBqSZYyFVz1N9MjY0QGH5SQWC9T359LTyyHR+E02ClCBITfBduHEqcV +yMpNUE0dXDLU3lxx5BiiswFfEnFtqzKaXNbrFWCy/Dn3QiXw/2nbpXG7ZvyVR3CW +FKEF/xtIn25zhgTL0o8eTA8sv5E25WHhP1+UKtiP3j3rWy+6alRpa+OAcDWWzEd9 +9spR9Hy7iYuWNHget5nsxodxttKduPaA0MH/aM2N1b/dU1/gC3bTX7ConfDWNTlX +Oc2ed4frD8vrkmeQ+12nkWPR5wOm9ZRoK6tJEbVBBcdZhVhzZxXC7RX+oU8Vpv3o +KlXGgMBL1VzXNJpTSzT6QK1gDNx3wt1Jh8wPsBvfkyV7/pTIRys2JVFkRN8AdG2D +lTD++e05b8Y5dRNUdC5fZFqqi1CnYXq1rRME6Tscqce/dBVLl0TGhVcdAhTlmvgO +l92H5adIPVav6hsGMtlkEj9zhp9O+OcZj2Va51ypvJoDMr8Ks1QEr/gTieRNcgtc +yDB4g+i1d0jb4JPdLuyEkj49fQgJyOgFbHS0dInMqH8b8zq2kFbt6Woj7IEktkI1 +T7JhHovq+aL22kelfiqDaLmGz8Ah5uT/TUL9eGJxfMf8xCxLt1mERbuMNjjUQpjm +jNRxzPhZwT5zrFllxw0+hN7rex6f4r1xhdWvxMVZs1pbbQMU48UFN8i7ANYx8qC/ +oHbJ//N528Q8VhF56Kr8W51bg5rhPIUcGWSHmUKhWeuXXrBSPC7+7/sXRyxfxXml +kqotKcGVd+Vmbhd1jt3oVzu1FqL0udJeEkJuWdRlPSJgb+f3nVScLDw4wVLLeM5X +2159lwvblqqtB7BiNiEDEeI2Jy4Fs8YxY2YNp2/ldpJc7jKB6VQ+H9SRn5wpQifF +TOoLO8C/ubwoNYSEO/UePuYVLDFX4KsPnZ2hY89N31RBBdhrOgi9oLNxQfOzWzpP 
+RMpjX9a6aXRGfqGGQtvDi2sWgfLvN8iQf/slB+z2mckdbamtbxOry1riBOVjdeaO +ksXhh6fq3wa6VFhRYkyM67zo/gdwAFDAwmcHX/UEeFqlhDqT6z2UTDCr3YBTBeca +QddvuY0tGI1ql+TQ5GCyBAXUkUR2fZEQ3aVYbOHs+YXyMH6iLfgLLo0Kuv0cgAOS +huJ62BtfhkXzBD5KlYMNBS6MBkKFwcwOQ2corgG+ViRS2IreIQlKMIAt4sn9zs1g +RsJVJ13Z4KOP5b29C/NhMQRNzSOFHFewc2uNvuNuI74IQpZFAVRwDm7kxIdOzDNu +8Mb1jzbLLsKGfqRpCxEZPV+AuIRnMdog2+sCnkMDvbdIgulHT/S4WHx290dO2HrV +UOf41Tc5QcWpDwnG2cKLMkweIdh6HeoW7y69FRDqFho4dLTHTPvc/a37t6SVFF3K +0O1vyDLK5F39GTdW6ROdWjd4IdjQ4R2qtLoxUM+gwyz9J782/AlXcSuc+8p0XnLA +nxHCwf6AmNgoDmI7a+WQiaqWlYwtVfscwnjREqvAXCtqqTkEg8wWOhVFIjWqcPwp +1fmzo+XsSDY+uoAXMtdSIyAFjhsmP5XZXASuvDR9htX/1iKI2imJGj+KilYMmbDu +wuqfLtLTytV7WOjI5W6qry5xYlYmcdM586WXEro4p4A/6droqsw58czGxPtpgS1p +k3cWM8qNQk4DflOR3deaq6nu3wUujnt1QA3jmTQMGSnVbVV/W7kWcLeglsYfr3Lq +9lILrj7TTaeGMsJlCN2TDN74eHCnwdjsJDkl8A+Te0TZm+HctwAIXTurvM2O9CY/ +bZc8NfQyVyVBqkzFViXrsMQ7/s5fMliIJRx8VlXAwl0Td5GgwQKP0hLILZG0wpDA +rS/u47IY0LYwXMXY254dQGXUdWtPUnkjb2EpdMbHsfX64NyBff3N7kv4wJENpktz +4XFtT8F7CCldM1DS8RcejeM4KuJx3FzQyabocKdTbJ9m6g9AUclXDz49HoeCw5+9 +nQlkzf0nlvYJNaMzsxCVpG9nZVfejFLqNilVH9TEA5wSHyRqY4WXBpI7ypUlDrQS +plFGaGHP2cX+d+36ZfQ2ZU0nizTFL0aLB5uDhKDa2BJj4Pp8aPKfhb4Ea8+dnx6S +QnSduizbVtfEQXoyLu0ICQcE/p3u+9FazKF7QEgQzkeI3MahoyZIfDK9A/5gDv1e +WEvttwvk+ax7EnWRx/W0GUiCHE/L7qV98+zErMeklcbSWwhI5CuqUfwq/tAyRcv9 +faNKX1+o+IuF7tCApQmiZ5xTqvnatM0VAi/HuIoVqtF3V/NAEKQ79rrQcXW30l48 +tGTWNo8+BGXhFYxs6O6B/RBGaMEnWXGX4Eel9k3+IngoDK0BP+fDn8Zwt2bYo9qF +Q/aFFPKItfWSXQ8t/ITtMaNRkEPgwP2O98NTh14bToOaIkwQqKIA1484ajya5dVG +HFJ1LbauzJiEKXv9xIPXxGFqcnkxPdmnnlfpitvvJyptbOvfUIXid8SpOF8za28g +HrXhw9At7t7uysI6t7KC03Q1pMuT7vH4VHyqQHPbTLnHaWhSPG6lv1XgOpax2Tsj +y8zoj80GlOEyJmCN3+aEFBXzmsadz8u3/2doqO0WdxTZS4+HIk4qOUaUk7LDxBOq +qEM8KioAAE61lDc8wTtjB4r4iSCkwOu4/zoaYoTbBRPptO1fbVDUUI9JIo8v7F4B ++i4fAZYBd4De6TUnaSVptHanh8IRI+QZh2mKFtQVI4dNg0kUFmCXR/dQD6UflZ7s +uzOzjKOLwM9AmsuFJh2k6Q7oe1dy4wsggfjlOJz+JwbItiKCx9c0Tr3hGf4M8Xay +EXVPWdGvRZDFOM/aVmYDrMErJNPxUtnVQBAzNUgnMqxdEFHhg9SkUHMai6hBBACH 
+NU1YO7tj4FJRLywfu+YmNJAV3rUecmytfkXbQLzXqIEE01bQliQ3e5n58CyBoPMm +bL4p0BRhEiA6hw2jex4c4u30sl8CaGnj6VRVpyTcMyNO8Ge72C58RUFSbm85vHpH +QBFlPs2eGbyCw3qODZgkXUte3CGWu1EBK9ri3h6PFgfsOz7euKrw0+pc5K4jfkgy +IGkOODvNT2rx39YPR8T2IEw3tyWKxJkuxydo9QOV3ElUPwkvhoh6My10LKQNkXNM +q+ExXJJ+iHLVHOk9e9JNI5fL1rU8I7jCL4V9RQGc6wuAwqp3BzUk/W8Pa+COTzgN +qD5am21mQa0qpb9XfNqehs8aINRhi2sTpm+g5xkZfDJxQGyI8qaMWCwLoMPe0nZU +dOOimc2QRZ215O5J4WIaAjrOmrBQqzcHvrISmJ4nL6Gfph+C0cWk7mx2QEWD3zYr +pvU92KSK5rt+bS5g+MAdrI7NNI5M7dPooFd1xSZ0ZI/UEuerH7bZjM87Oho3GKtd +fySGNv6YnGDx9/EKYRZtpxX0FhHkXOdPtDenO7ROdbqomAaQ0zVZIPXfwPYgOKrQ +vIGdscOkcChjdCQSigPYieiR9Z9Nawg5auUI20seu2/J5yRNgKsDANNV26H2lHNt +X8O9CL/trV4BELP7Qh6/2bPJ5V5cyzk6sj3quzxOvvenclfXy3KOqIiDmeR7qbWv +1uK7wgtgl0WUHwN01PSPCaUEb6jrYPJUuusm6ZDheXelfit6KtdXWUTsQxKDCIXL +LPpFtYPCVH08ds5+YvZ6bOKPbpSVH63ixVFlMpQl494OzjcvbGnOx5IkBXKKD2pG +ONnSnr3FeBkrI5jd3uqFVZ5gQ0I7VRfhX8y/9TmAjy1wunw8VdSwWZ/ds8ZNNUfM +TfEwr6mwU6vGquduyGDW/ExJKIXWBhN2g7kmxbp9m72FxTs9NEP+cWETkxL3L74P +bQMRKo7hLpMRgibuHJYu7/6ixoylIp9LOZcMqVLdnRAzEC94HGOSogmWVfxScmUJ +z/9iot0ikASs1V9HXf3GMtuoQ/LEFEHpr/a6BkRyt28tuC8wqtAPlotln5K9rmLL +wTpPK2cfFh/5LKWFBrw+2TUGAejpw5BRCWy4E6WguFQcKT9MD4Nbk/SKLrgUKOJt +RqxTqDF20bJOePOlVuvqjJois7OZrvyvPf7fY8enCA+ETYnH8hDAfvbz7TNo8AgB +GR5FtwV7mZPGd2391KkOdr3s/WX6qHVG91bgHqsPe45gSLgpW+u4d5Wc0zVRU+a1 +HNUl/djNi4XmGAptqsar0lYnEsHgjCJx5iz3+wuqnVuVJVzpgPeMsmFW7Rr/5arO +tSBLzcGMPBHzfA8/l0+MyG4ddZj+lciQ44vDnEGiUU3xyEoDi5e/IpmjHm+e2eSo +DI1WTPmGZ/PFEFp9DTvDt+SXmI3YjJjnGHtSOOvNMgD3a0hR8OwmtSAAerSkYq0b +NRCAoTMPQP4LezSuwsNn092nTVgGr/MFNfKwiJcIQU/ZFa3kvIa9T4aUICw6Rd9+ +p6pUz1rP+S8xx1Ipb/fHcIp4+XYSEIto17V2azovu+0tzlc6MAXetJ2Vwq0O+pZp +S9fj8mO085WuxVR3TEGQS2dKseSFNCn5FsQRiA5iAfxH6zxbTK4oLazFpT9ovO5I +3sE2KAIkZzfg5Yt9erWmI5dN4nqXj/jQuJ0HcjdNZOY+NIxeGnBC3TxbgspA87Sy +nUqIUshyNd7qTLJ6oLLNFYm5LdvYwXgLpKYq/fCIGSu1zUlLqq6YsVTgQoZ1PF+k +PfPBOTU0AMCbuUD2LUOq6Yp1H1USBkUPeKuBnyYx3flF6TNlz2fK1Bl4cbUt4WZt +9kd1i6Itvp0mBQni1Tx29KQvtdzy6ffX9aqkEjh7N6A82OwSWI1naJi++rMiW4Dq 
+BOeeJV1i0f1JEZndVSxHpr7GVuEWK5XDAJAeDfwp1xFPf1Ct+/rdJISiToQR2agM +SDkluezwjUb3UgELiGiXxhTNvDaMQ9sdix2hRXg0PJOrtC86HuX4PNnClIzagJEv +Vzb0pLO6jU0zkI2fcVT7ooldy03on1+3S1Enm3OHV2QApQJgcKtk4+v3qQBu80jq +kNPuUDJUrER1nxT3Ehf3cOZ3Z5p8HqTtKKC4Br8jV4Qs/kbhXKvedtjadSU7U4je +fsp7LimVozF6YA1bpitff0ejWCITA9c17A9Yv3IYQU49XLNxooJ7IlFTPmmjGdkU +V2ubbmwkAoHyy19FySqsuDUwCQ6S4Lb3Lso0kxprNP0w8QP6PVbUT3qFDdvn2hg7 +GSQBafoIQDhYG2/tOKWpayLSro/iFHSlHfoKYei2UBi5ym4qFQ0Bgc/A3i9eYXsY +3nAbi8xla5OEhoLOmxTN6DAqGAWVlKDPpYrWR6DTKjG2yONG3OY00xUhy8jAmRpO +rgxyqygHPFcBpUjTwqQE9LvIj8af2BIlI8iRcqo3vqwPRbuwTwNkKFVgyWhXoTEV +JUGcdDXHkYC1NUHBOs9lYLR20MuHaa8ue9iO7b7eI4sRxwJT8byUG8ZfoUTP6har +QMCddHAVbNKcQMbKDOb2C4UpX+cMGxuPHRCupGxX4yL+lWyuEYvl3XwE5Xoo9yjL +ZrVosHwHNRZ28/xLK1ZwFo1crsDO28s6E/hiw2wORGALvrMEpE9872RFuhUmSKG+ +/pTnHEEmGKY+8Y/ZqSf8KfNE5lbvH2wXon32NdX9M8lheXfYAOYXgi2Z4x00Ep9D +vRzqxJOZrIqwmwZ2ILot94oqQhki5Y2+THfUCBc7IG0IL/pxm1GaZhvJxjC5n/xn +P9eS2W1CEIJUeQtSJCjumTNFIWwvJST1AVWt5GVjqFlVT1qqciQM63Rp2OU+7DMz +/CfV9egGgMW5q1dLcLVkyhbDBrg0lqzTHs7wOot1P2DPwZK9wi2LW13q8ZGWsISB +16LjJDeQlRnUkT5tVrse8UnKBsujm9Dy+NOnkP8rK97FD4mHgGDu2+vDz2g6kiYg +oMEuUvxNCnlUM2oa/mclef7ULysMfznu8/ANkWikuEi1BDIhj5ww/8R2N0nwb8Bs +rpRBK/ZojohyGfc1K5iLN81ACHEIy7BcGpYyutkSFDVlUhLKz1+bgB8ZV7VJtFYT +diJu+5yB4PcHNQsF+9RJOQuq5qYEfVVP4JF5k6degKNPUvHqJSgDGL8WC/eiCCkq +MbR9ueZwkHW0LCZ2WGd8Xb920PAAJic8tpvDFMyogZ0OmdwZhPlI8kUfV/STqmmx +N22irC+BB4E0oShiWoUK4sqE8vloqEOLcjB4ijaCNhkBhEtA72jLCMngidMChcqq ++92tHWKKgNccDBF2yaydlreEwhB/bx7yzxA+e/kCr6tPxb3JeuOLvbwvG34D5x/5 +MjRs51RmVO/JE/vsob67PosrylzJW/NR94rDP/knxEzb183sRWsAEUffBShWFd9s +ee7X+4bHBraNz62P1CinP9I89j5+QFXHWAtf6I4cP5RLyhYnI6yFlvU8i8AM1yap +visnzlywV4zf0K9SpxqpX0a48kWUDLwdK+k/or15jziQDQ9lXTDhjuGvK8eDc39m +ZZw56RcKeu9EeD9DnAzisKEucmKMrkbMLUi+huQMuPRKlE4vIUR+/L57BR3D2fN8 +BTp2tGIidDzvYCndK58KfitmjFHaU9rI6m1UqfkZBQ2wt9JIbFBQ9LWsSKTaEs0h +usPT48tVzcxpz0u5ED9opj5XRa76YkvE8pZjHGQvYytx68M6VmjZXPJgVVByBWEc +xoXBSFBYr/OrreZludUEPbSbjWVtKpwm0V/Hv/7xO/TrWpxZR3Baz55EteTrP+ia 
+LPxKf2DC/p92jor0ZC3qKvXf549W4rtwU8jfX2WnN0WzH7ETjqzitjl+RFH01Ocy +Ji0xN7W+3OSKebR9xkXCnyHvnwRBmvJtaAh/8uW9KNX9FiUkABrmFzt7T6luri6B +cUnyD3hjwTQy00dEZd8XDqPE/AP2QkNtwl+FE4sAhGosrk4OEqvnv7adhvftHAY1 +2FoRlF3UGLe8WLEbvx1Gd72o9wUXPjhNf//Xd6tgB5nyzzjLOkylJdxyJip7Qu3b +8dkwO5aWtPMk/fJBd05+KAK1RjkWWvGrfm3TU17/e8OqS4NCwa44AmiR9hTOnit5 +0jTAvSidad32EYzrwlsG7bHuOgNT0+Y2OyJI7CMx5j9zdp17h3cBCzJ2eXaEWD7F +28ahJCryBSPODec2Gq1vkC/OQPvUIltfdz4P4RpksUbnZPYRq+yIa1Fnx0Gd9qea +9upeTFiQmqX7FgybQPdeGQMgLO+AaDNtgDuMmwG2tRqtxoVBA8X9xK8zvx0Nczbf +fB9CmqiD/vx8s9fI2HOVULzbpOFgZdGK4vY1G2IEvE/9hS08uoyH6uuUWDC/5fuM +lrMVUju2iWgtz74XiPRmU7h2CPsKJtPD16qKlscBEvd6Jm1F/cAbkO1MR3kZJLVO +Z3bFSbchpdedqAM4pYsLSKHVG30Hi9U9pDz3P5L0Z+wlecj8alXT9sFxwGJX9oNG +TbEzxy+9ftrTVb1C5IWFYRoi3QBrF9idebLQA69W6u/xtjLfDcP/GhElKagdyr6H +wvCzFQMyqDMa2soy6u7z5hjxohSYIlMDvx9SYm7WExUMzCu+gl+ZbgwQDi1ezKVf +dZ2FYTEs8L0CNIuptxVFV3IRPK8DD45thxodBsBv6+QYBKisYHzo+Cd8Yb/t0saE +ikDoLIqNRgDBjdT3M3lvFYaPEzUtKyRTC64FjY4xuSIlGFUJUA9JMGTGZWjV7oWQ +7W86VEtoPVa4WJ0dYN0pxbpseBqybMTMuHfAayv2B3R4MXMoiTmKq6uxdMKMdIvU +JUXlHmFpttAJRRG3/KiRzl/KCeaMQBxeNk2iPdCSM2ZQJiLUgYmvcqblqrOYtdmW +sqkYLBrJKf4wXxqEDVYvW6/1cfOxLuKSUU9zwXmMpyY4NZmNPicNt7VmAN1n21he +S30GZoo69B8sK0pQJVL4G16yn/H+I/eqqkMkmqITpaUmMY90EaxhN7172EheTbkt +OJdUVqEsHnd1qMPvMral4ybsnBiVJCJX9D17AHOKuAu2LYBy6WgH+akZEWY7KBy+ +yhwtlqBrHAfRw83LXCKavpUZ+U+KfXcmdiewdzLDlYryRKamCMVbuoFNLLX/EnDU +eEz3fDocdTT30ckju+/5LS2O4Puqqf27aE2+aWqfpfdueLCfS5TiGYlyYzMsaZim +X14b45BZd9JnPlVbs1DWrQvw2Z0L9VjGPRlbKoyb+m+uso5SUf12YVV4+foGKn+k +m2l1PkVPCpvFG+CAWLq1S5rV6SgyWa7li4oLlJrAYPB2266ZtNz+zK4pvlIcMNGo +YqWsDe91NNeOik90V4W3b17/SbA3kEBFSLgyTmyW/IIoXtyDSsXKWRFt/3DwlMwb +8yKkGLelowUoCi18q9TabGq6AF4M7+daBWkJ270ikxmwTLLEESm7p7T5ngSa9lXG +pqXqZribQOL5vcY4gVyPnk2m5cI1/7PlqrT2Kn4R9g1M2A+ONrAYmx5ztz7csckb +5nZvcDJF0XFgVkOAi4c64delaeKm4Ks04rjU4ZfOzNIRoa7wMXeO0tRMni6Zr7VM +zVjWDfkUnV76MAnVNcQg00nvSsDsiTE06OIk5ksHc8GRV9Pz9Rngw/YhdbVz/bp9 +1PfDhknB56HELIRmiwWuJf1DVGO5lMg3cx3LISEE1Jz34b8dV1jEaOGYQgSYqv+Z 
+QuxxVftBRjwbF+dMc7uEVPWyBiWE+xoyioOXeFsAkFNrnGADvyki1qopQPMOll5I +fqbjR6UwDLntTWTvU162VdtrI1dEZPsBpySFuO7o9bmPgF+sQGYQpvrzqdhr8APM +CFZVyUqsmp6PyPRmLxB3nCVk7vIv19i+Bjt3iwLwH/5dzc9AaLnleIVwWbKvz1ch +FHca5OqqSU6hux9nPtl1ES/4Ysex4BKZ868pgFdzHRnssTKvkWEpbH+6FlOYS7i4 +CHEwyDJ5yUpwMmCEWI4V4PvLkCZZZfWau1lTgF4gKlUVELhKjUJV//wunR9fa6B7 +2Ujpcpx4WzoV6IacU/A/POc5K3LAFL8TNq2NMmLp7NNZWNt1ldREMLuJK4bTF5+p +AJu+gnNUWMGfSQNr1cURMvQFBZqcmwgkqz3/x1nwh1FNiiUCMQgsPxXmFFY/j9rn +/KpvLw5CgxBohPT/H7ESvTJqUg0ojbKBT1wJhwseSH0XfC27SScnNe5wLglxiIQC +Nqz/erFNTQSn5i0/1MqUViANl/0Tz/BVrDYu0rkr+BmLkrBhM7ehoJ2rk9nI+VKi +7FEigsjcUmY+AoD3Bnmfwc1Y4ts6W7vI9Acz8Gu8uREUNUrkzhqT23zuZFK6ny8e +GR/X+zsonScEgj39kXavhTfFZIFDqhsR4NlBEQDDT/NizQuLDb5KGVqWLeqbW/Nh +TNrdv82e2SJN0W2Kt9KX9u0rr0UIFrpJ4zSM2ZDRNpKv8S5t7gTsU5z4QVad0Ef7 +G0FP2mp5Eu193XyAlLSuapscqBGuScMjqdUMyeePlAxkC/P3APsddAGNKA0TxU7H +/pm0+XvQvl1LPqyovkSP5k4e0K1wblKsRIye6ev+Riey8ZqQh+x8BwuP4E8e98yn +4y3Nh09GQ28pj/bTGgktHOqoB6TWNuAtICIG26yLL9GpJpRKK4uNo83JhIpsKYCk +6KRA61MfsJP/72kW0acvNdFxyt6GLoUllPmQN7GKsomGG/wZGrUoKZKnUNCU9PlT +b3oKPjwntp3Ula8bm/qD4+FKlDRtfcrbHERJo5uBTvMmtocpgoYDS6ICVPr26cWE +CjiVH7hFGXG3tX9tPbbYArg+Rn+YrA8DH4NQdBFvs+0T7tWD5XNNcW8WyBbxxs4h +ifDDz1OeT7qy9+u94/faPjlDt8kiRx9dxWpDJ6l+Y/+QzA/l4PSTfOpdrkwgwg4l +0fFxI9yeoP1q6gLlNdixcvUwK5mY6saqSkeiPl1FDV/j3YRVq6q8MWYuMUzeLIdL +VOTuaOYHAhm6e9J93ZC7Nx+kQA6y6CRvM0DUFUI02kQ9rNAozSru9mRiGmkAOE2E +CXcKkllAWVHSxRt5EF8Qp84IpHGXhctImwwK2KAzIrURw/HuoqHDhtjgTX56Cexq +d+QceF2f83a2THJyG+FSy8YdMRXmJcBcJk22mHKbN2GoxnswSlvHXAlDyvylrgBe +KU+qsOzf9CV403K1X72DJNqhHvUbI/oV5SHKKuzolk6jlw93vkbhG9ELyr/Akc2I +jpd8rTpcOo5ZsAAW1Zjey2LSljqT37vtHxevlXpwuMiKmcTeG8GixP11FYaO//cn +moJuZkNNTkBtjTXFei7VTrokqtzY5y+PxEhXMtYX5DNxfYkVGZUzobTNRYZm4jhk +9Ow1aI204A5xaEIgBmQEjae09IKXTJFlbM+iDeuQwqLcif4RyFe37uS5kl7ROLrs +24YcJC7wPpgu+RZ7clTGBZwDkLi5Ch7l6czpAquYtZVdnffpeXJl9l3P4fzKs3T1 +5vHStL86MbL9Uzd6+FuSMR8JS+b6aF9qzu1K5jSmod7uvwYPNRzJUPgZ53GbDNv7 +k5Clb8orien4/0qW57A9oCPz0dunySgIK1SQ2h//Q020zWQ86S3m0/xcpK5hCOYf 
+uuYtrPZ8TNNAlzUcXe+8kI/PtxB0T00TCNtksXBDKkqKp9stNJ9SyCGXrOz9rsDJ +9xfwVfLoTbjKvQxGNcg08QBijtUmSAaIPdYLm427ptflUo2Qda7g0H2mG0T8TYrS +8yb53ajDUv6oFb2X7hs8dmvu24l8XOqejK3ZvVobn8wppXzRGm3t5S2FYLKXu8Bm +R6FKg7XXxME1KfrMsKfeGglv/XrK8Hwa2VQt4BqsEnhI8cOsw7pYPhx2PUbJ3Aqk +JlxWGeYFWlLc8Nim51O7h3ymLIBTlMHPrQYEL0dBWJ+Rgg4Y9NmgkZwYGhto7fk2 +0MUeinUUYTCelCxIxmESdvYqA4IybFnm0Nr0VLEtnBT37+rty+haGKCxDzPFyNdK +xV4Py/VO0LR0e4qp/yn5naLtsMqL96wTtC/CUYlu6fp9MczwXFWVy/9c2cmIJYwm +/8YMzKwJIBypZMy8tChnScZLTeotoRrg836pghgXmn3SKvPBbnoaRQxzkyGC/7Vw +pY0NpAw3+Qw/0+dxTdTWS+fIKfTdpyQfP1VU6qfshkvJRn/5fanveFZvhi73Lkw7 +Khh2XMzL4hHZNKQ3Msy8daJTiyRVB1BjwCOmSYEuWfsdj9F8Tg1KYWvcEUV52CV5 +B+BsP91tiM/0Slkl3CuXKioOL5W9qXbLSF3ERX180ayla/tG69KwmLnc5dHF/4yU +3tlAg3tun50qjd5eFgmVChloTL0P5XaV7UVzyJbxe3yvZK9sujROy5X2bduunrQ/ +w1a6v6Rc8kTZnXzOLJKcaIWPFLoXVbgA7GCZ++Wxozw//LvqMTuijVcL82jH1Slg +u/TeQDOnr84qS5rhwyzczfsYFNUqYLfcPkzrqykaWxPC6viUygqPgKIS9BL2uX3K +nXkK+dHTZSr/8zn+tirg04YJjgAMpNsJr0EINQ/nGF+xtVmmhmEMvz+AOaTETqyp +d2QehC6Wzc2h2uvKGAA7ZEArTXM7JahbH7rEmvf+O/EmJAjbpsAW7kg59xbobB6H +nUTo76mde2Av0gyujejHehqAf/+e+/xFej5suHO41Z4baUZ3uTk9d0f+AqNJV2FK +XJD1HtOdecyzulN5z5bw1BSkgzhwHbZZhW52nWX58dEDE72iV0qpau5kQDw0MJcV +d/xQ1Uarm/JVv0WYfF5ZE+wXw4Yzp5E13aYVdMzaQapi6IAzdmp7kArcIB+fPvW6 +Z8uDs/zo6aqJmsyoLWDc04Q26b9FrLGJvIaIv878WhIrwjIld82JlUoGix73gz0+ +xBMynpdDej6cPEzTh8FHIXDaU7dwddZhk+Ulxl48YOukUHN/0sEYfXCwxxvH9AAx +w6ymgw/6tByOjItiry1HZLkCmhIunNY/d7IE9wgFteaMbEdwxVZchqsPX1yHw+Uh +YxjBHL15F3pocXUTC4p+xL1gygoPvU8gLI89eWgIkR38qXnz5NQQPNv5+2/Bmt16 +JZlNI2YItNrS/NAcIu/ay+i6O9iltc3T8u0LVgqg/naMoT3QjLo2PFQZp9jpZ+AB +aRSMV4wnipz+Xum5uRP1yy2MiCcezhIsoPn9Ig3A2j2FBuJKqTVeSngR9GLTsZeL +EWaWSNvOHxHkg1jkKiNpyI4A2e+lWj63cyJQFwta9CEVNC9HJ+52Y8UrKIpD5U+C +pWHYSDRNnVEhIXLdgiFs0QHUNuannULPmYNlw3GnOAK6XQcJ8lrVJ34KOKmKWanf +CRh2q/g/9qLBlTazvFbJ0btMjfEMHr6OF07bCSGWaqIrY+4+Swf2JvXFvJMSRdaP +NXbLLYKdjqWwvp3eX4xx9qRZ3u6kW3/WbEmdnjEEStErx0I0Oki/a16rE+2jQfLG +E/SqcwP4nJcvS+umH3CHncSH9AdKTybc2FmU7232b1OkYTHg9c1VUY3Fv5DA3YOs 
+xwARpaQxVhUxkcg5dXiC0V8A2g80jajKQZSr61kTUzKtZVOR38fDwC1jkLbU4HpM +lKHEikoiYjn/CYAdHonsWmeQEyiv/jrN236PGDvhQRzi78YykrvwKlM3Mg52vW9o +HA0qBHiMiImxldw3M5hXpddDRn2Vh+FbWxw8wwLLA6XsZifjcCWhEpROLJI1odQI +uEh6oZVbEhFcyZiLled+2kt+Y6QchwwoQQf3e9TE2nH2MD4qwjFcZivnJ487ejjP +uX1cQsSv7+YxRejFtkVz5zELL00ylveAzbgXEC1+84j9rbskjqfvceBFwy+4iG1W +XYxBznYdoLZwixneZ/JWFaePJkqd1RSKjAxskR/2DXiUmjI5eH820u1oMPWzDXsx +kpN8AmmccoftaLw/t1yHViwM5jgMG8i9c0Hl2qKSkfJHvy12FkiHyP6hqwYynib0 +zf6Jp3pfQq/yaF6YzjGX1vm+RnS9Wtyg6aw9G4ZrtefKlg5t43/vMUlXbTUxTqlt +2QHeHFWtYD5LYqVY3h0uaMQnV+Lk1TyiPeh60fTA8tfNbOwjzWbZ4Q0y2CnnDGWN +uFW3Vy/zhTh6j2AkmAurXaAYQKXPsQG98jgagVEKPrlXUC37lKtQ3Q== From 8cd9733cc4cb63332d845938423c7d71e85d56d3 Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Tue, 10 Jan 2023 15:59:41 -0600 Subject: [PATCH 011/413] Added apropiate error message in zos_lineinfile when src is not found --- plugins/modules/zos_lineinfile.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 89e080a07..42733ed96 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -421,6 +421,11 @@ def main(): # analysis the file type ds_utils = data_set.DataSetUtils(src) + + # Check if dest/src exists + if not ds_utils.exists(): + module.fail_json(msg=f"src {src} does not exist.") + file_type = ds_utils.ds_type() if file_type == 'USS': file_type = 1 From e755609a3f03e09183fc7a216a5db434f33b419f Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Tue, 10 Jan 2023 16:07:33 -0600 Subject: [PATCH 012/413] Better error message --- plugins/modules/zos_lineinfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 42733ed96..566b13608 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -424,7 +424,7 @@ def main(): # Check if dest/src exists if not 
ds_utils.exists(): - module.fail_json(msg=f"src {src} does not exist.") + module.fail_json(msg=f"{src} does not exist") file_type = ds_utils.ds_type() if file_type == 'USS': From 483ee63050efad722247f2f031f003d936bcbc0a Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Tue, 10 Jan 2023 16:40:36 -0600 Subject: [PATCH 013/413] Added fragment for changelog --- changelogs/fragments/584-zos_lineinfile-error-message.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelogs/fragments/584-zos_lineinfile-error-message.yml diff --git a/changelogs/fragments/584-zos_lineinfile-error-message.yml b/changelogs/fragments/584-zos_lineinfile-error-message.yml new file mode 100644 index 000000000..f32bdfc3c --- /dev/null +++ b/changelogs/fragments/584-zos_lineinfile-error-message.yml @@ -0,0 +1,2 @@ +bugfixes: +- Fixed wrong error message when USS is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". \ No newline at end of file From 7a5bb929646bb130a70970edec82f0e882870861 Mon Sep 17 00:00:00 2001 From: Demetri Date: Tue, 10 Jan 2023 15:05:38 -0800 Subject: [PATCH 014/413] Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions --- .github/ISSUE_TEMPLATE/bug_issue.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 5ad715b99..85743b84b 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -84,6 +84,21 @@ body: - v1.3.6 - v1.4.0-beta.1 - v1.4.0-beta.2 + - v1.4.0 + - v1.5.0-beta.1 + - v1.5.0 + - v1.6.0-beta.1 + - v1.6.0 + - v1.7.0-beta.1 + - v1.7.0 + - v1.8.0-beta.1 + - v1.8.0 + - v1.9.0-beta.1 + - v1.9.0 + - v1.10.0-beta.1 + - v1.10.0 + - v1.11.0-beta.1 + - v1.11.0 validations: required: true - type: dropdown @@ -94,8 +109,12 @@ body: multiple: true options: - v1.0.3 + - v1.1.0 - v1.1.1 - v1.2.0 + - v1.2.1 + - 
v1.2.1.1 + - v1.2.2 validations: required: true - type: input From e0cbf2a9ffa3bfc6b7e5f1f784bd85f34038da7c Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Wed, 11 Jan 2023 11:56:35 -0600 Subject: [PATCH 015/413] Added rule to ignore python 2.7 compile not supporting f strings --- tests/sanity/ignore-2.11.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index c362873c0..d4eed7091 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -61,6 +61,7 @@ plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Pass plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_lineinfile.py import-2.7!skip # Python 2.7 f string is not supported plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_mount.py import-2.6!skip # Python 2.6 is unsupported From 0c4757d99e390144ee0eb92ce6d5d3722e1bb452 Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Wed, 11 Jan 2023 12:01:45 -0600 Subject: [PATCH 016/413] Corrected rule in ignore file --- tests/sanity/ignore-2.11.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index d4eed7091..7b82b5e80 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -61,7 +61,7 @@ plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Pass plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported 
plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_lineinfile.py import-2.7!skip # Python 2.7 f string is not supported +plugins/modules/zos_lineinfile.py compile-2.7!skip # Python 2.7 f string is not supported plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_mount.py import-2.6!skip # Python 2.6 is unsupported From bf022520a9a10d1d256d7f67cfe807ed16fafb44 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Thu, 12 Jan 2023 09:33:09 -0600 Subject: [PATCH 017/413] Update 584-zos_lineinfile-error-message.yml --- changelogs/fragments/584-zos_lineinfile-error-message.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/584-zos_lineinfile-error-message.yml b/changelogs/fragments/584-zos_lineinfile-error-message.yml index f32bdfc3c..fad485765 100644 --- a/changelogs/fragments/584-zos_lineinfile-error-message.yml +++ b/changelogs/fragments/584-zos_lineinfile-error-message.yml @@ -1,2 +1,2 @@ bugfixes: -- Fixed wrong error message when USS is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". \ No newline at end of file +- Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". 
From edfae9a2ca661d9f52764c1c634dba1a031ae19c Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Fri, 13 Jan 2023 10:27:28 -0700 Subject: [PATCH 018/413] Added missing fragments for issues 309 and 408 --- changelogs/fragments/309-replace-text-zos-encode.yml | 4 ++++ changelogs/fragments/408-restore-members-on-failure.yml | 4 ++++ 2 files changed, 8 insertions(+) create mode 100644 changelogs/fragments/309-replace-text-zos-encode.yml create mode 100644 changelogs/fragments/408-restore-members-on-failure.yml diff --git a/changelogs/fragments/309-replace-text-zos-encode.yml b/changelogs/fragments/309-replace-text-zos-encode.yml new file mode 100644 index 000000000..b4ba2b53d --- /dev/null +++ b/changelogs/fragments/309-replace-text-zos-encode.yml @@ -0,0 +1,4 @@ +bugfixes: +- zos_encode - fixes a bug where converted files were not tagged afterwards + with the new code set. + (https://github.com/ansible-collections/ibm_zos_core/pull/534) \ No newline at end of file diff --git a/changelogs/fragments/408-restore-members-on-failure.yml b/changelogs/fragments/408-restore-members-on-failure.yml new file mode 100644 index 000000000..3e6c50d12 --- /dev/null +++ b/changelogs/fragments/408-restore-members-on-failure.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_copy - was enhanced to keep track of modified members in a destination + dataset, restoring them to their previous state in case of a failure. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/551) \ No newline at end of file From 1aa0b7a6114553b3faa5af209d0cbfd072f8ee5c Mon Sep 17 00:00:00 2001 From: Demetri Date: Tue, 17 Jan 2023 20:06:06 -0800 Subject: [PATCH 019/413] update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos Signed-off-by: ddimatos --- Makefile | 4 +- make.env.encrypt | 550 +++++++++++++++++------------------ scripts/mount-shr.sh.encrypt | 142 ++++----- scripts/profile-shr.encrypt | 394 ++++++++++++------------- 4 files changed, 545 insertions(+), 545 deletions(-) diff --git a/Makefile b/Makefile index f0f6cd9d5..428f5d602 100644 --- a/Makefile +++ b/Makefile @@ -68,12 +68,12 @@ encrypt: fi @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ - echo "Remvoing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ + echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/mount-shr.sh.encrypt; \ fi @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ - echo "Remvoing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ + echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/profile-shr.encrypt; \ fi diff --git a/make.env.encrypt b/make.env.encrypt index 84560ca7f..f1b9636a2 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,275 +1,275 @@ -U2FsdGVkX1/dQmXuudOK9uJaqWJeTP647AagRJezWwFRQK4EkCJEBSRNfmQQgjKc -DFfnSa9YpJ4zSe+ecwfWZaqZUryyHC50YoGjDfqcRNmJ7Jw7vKLGqsM/KidA7IPx -6Kt1r14wwKAbZ3VadYWLnKk+CwGEMHq65wLSrP7GMVSI59+02/WpJuEjjfAXFb29 -t78+d5GTXzNDK6VgjAB908YIMfgmSeVfAP4IeqH9PaqoP0ExBYSgV/TiQa3L3nQ+ -Js0EwUsfroEk6/t9CfbJl6ZGLNPPZJlIyAAHmd7B+MKgb6b4YsBlc1R1GgghjZp3 -d8HBAahrGwSwup4f8nJMGZkDHUFvOSfQ0lp147zhpG6DkxtOzluwJZTI9hQarwR1 -o4V//OvnYxBbXybrbh3fyDt3/r1x0f+RMjQ6nUyDIQgwNWhoqsvPoQutCes63OG7 
-AwRVYbjKq7pMKPWn7KOttgBJH1Bqka5TPZ9sEecSkB+wz7GvQgUrbpBYCDPcR+TS -a2WBArd+OzSYJHCj451kobTzATKPwLS2Tw0N93YG9zZ0738dydcIV7WQxQC/vtQ/ -OGQ6/ttLptEffWkICNkWpRLUsdB2Ih8HW7i9q35ynYmxDollbi7dRzulJiqbGre8 -yUL/a7qLgSD0cwIR3tZeV67YxCKeMP03fWEhrqkQwDH3tK+fYe/YeSskbqvlW4C6 -450ekOKq/9XrtenW5UZSesulndB3eqai1RdfDEr42MtWrYAV4cG7aPpy1E8JQkbs -xo3urfYeUDl7hpe+i507fLkoio1T24E1LpL1ubnw9YVicqR6kqGvuf6PfoeOHX3I -mlc7L4X5wfOKIxY8TTPjGUiCgAWQd/AfYDyo+X4KqST2EMUgevWbbwdCwxyenRcS -+8QKwCEvkmXMyeH8NrOVlji4RJAi/NUPmdtgSgKLi0o97VClp5B/yAm4WB6mJ2DT -GZs/FN8N2z7Xw7dODphsAcKwbZjyXALRJ989+Xeh+q46UNumNjePtrpzvYvS2hYA -qk917ml6hRyPiSufCbvtDGeOI0rK4T1Rur1cSx7VknePudzfnG9wbKuJ5Q38ZlKX -dOBRCpTmtthdBG1iCJ9o1av+QTud3+/r9/4L+3y9H/HA64iaU14EUs/rAma3Vyp+ -BHfnCOuZRkohEl/BCpVB6MwqK+H6g2Axh7Z8+HgsNokBgWub2/wmdeRb15Mxt77c -iGizz55NkdJ/Qv6QSzxVbKDG1aH/zhSMo0hIZzYJZ1VSIJ/YfFzmHDpRKAc9UnKK -RoPYzJBjyJ3vDanvgvhTtnTpmvxgwUodA4sEexHw2UTP3EtClj8Pku4zJnNB41Ah -Pkf5v7ondEwaBdzKb0z9MWC01CdnnJlNfjfYvwiQJ+R/2ziyhybibvm0Lwb8zTy6 -Lk8aF84G+t2zyLVhikQWfjlQ1rIZyT9WHGYGC2DtuM7fextilmTFx9ZAEtVZPQLk -PY+xVTL6oUpu9xF22NDAdkiDT2OBErzRB8nzjSBp79fsPFnmgBTdsCUYaNRc3VK5 -4dkEkEsWSDH/g9C9gdHrzUer25LPPhHNRS31z/DT1lZaZX3zcmBIXY9Qx1clwmqt -CA1gGd8sOnrM45JNzPuL45ZT1qfhjHpW08y/n4e2g3Kc1A2u2Yxq9YYjpdvXzAhR -OHeR+Upht1NN0paX3T71upRlZoIIhY0/81DPippQg5NjNr9R8M5TQ/93oZ9DFH4+ -36qdcuYkqfUwAc8dT9bMFMG3pPKdvqNs+3IQ/390P/DJtnBYeRkIw8VSiGTV61fr -F8nwihZD/LKWitOSJny1MRHBe+UOgS8q9vk0NiPoPMEKZ3YJefCpOwh0/mnfDb6E -EQjec9XOQeu6iY/s53XccJFF7UQIL/zo5EPcLWkabOQPXSf5w4HeCJi24I/wv/wZ -Hd2rxa6hAAmSMbdwFjdAEd4Pqzd+mjztEaXLmxz6m6IOc5aElLAYd8Cz/5/oArPk -6R2CntX0Kd3HfPLDAuE0E1t9jV/3KgT6BV+x+bqIzIo5TCx8vE4Wy8ryxyRomk00 -FF86CZSzBKwNE2sPrE1fIVZtifn5xc7SCpxaopaN6FK4t+R1Shcio6tpMDG8/t0F -TY/oIDuxxtYIvs6C/eGs2TxbxvjOls0VdZOb00C0oPkIM58TOi815GcdK+EhxuNk -sM5YSh1zHjR868A+dHEinYLfKHPFyKTr2l97rxThl5T2i+jnbxiF7jS3p/XNCFoG -yyqJFUXTws3xPH5EaS4PEhURu7uxrJteie6bGPB0qDb1QxZLyRNaw4zdj6Uf3Olq -FAgnCbbHm/MMUEWWzZ4K7O1A3IZPwy81FfylKjs2rdZW/Nho0fxPmo6oI1VeQOB6 
-M8b9anBNtBEW537L+3noJecitGdRreUijS54qrrQOAV/1C2tpIQlsF9V+JbbFV9P -9KDnYcDi7HmPydehAXHITMjhaArmuNfJ3WGBEV9T8Cjan1R9NQtmUHMEuxfCHf2M -RXfb/Rn22iNsdYmBELJJ6hF5PdKp+Wi7R2tY4auWYCpAlqp2qjy6x7miTCXrf4QV -Rz1gHWn71P1FyfTgap1M4ICqdmZKFPUIrLJqV/on9SPT/KPM5q+W9sLiXo+K7K51 -nbNvatChQ2bTe+tSFZ2d7NUTCSDXH1nHK4DtlWQ+FEVCNZrqAxbqhFCSPvAlTuz9 -eK+HkTvvm+JXGBJLdT8VHvffNBLfakkLf6OqTqLu/b5O7gB7A17mYxX0ifqFNvrB -51x4dPmhu1vs63Y8fo1vaGEOx4BrS08HZCFjP0jPWjemaffSCqaO3kDgi2AmdCxt -5M5JEbp9U05aBoD+q5KsgoTR64sHadhZx6zA+hqNMzsRCG4eKTKFchdbgpjENrWw -KzKNuv7CXDtdGLIcZW6upozo4z0WbcsTHQ7G1kH6YFXZnrxHXFHrHUMnNaBGf8MX -hvcnZb2w7m1s+gN6frHnHGwJq+l9dHXVOWwHGbiqbiWqXzZh6SrfRVFfbd6ed4aS -9E1VzfWCG4xdhRlEPAqUBvkDbDsxoaj1vXxwPwwqaKRBbrbCJ8NPuXO9b6TC0yEG -74h/DMtumEIVLXTwFWJa1au4ZPMUXaCFW5nUJnrE1hYjWdi5/ro1Cdq9eIDkWVTQ -0xZBzuXKsnNi6ysQlzTZ4GyOgiF4Tkd49nuGwgqjCPJKauCyVVBQzRxfpWYWM1tB -r6A5JWz9EMRJraXwFkcKwP+olG5/CsO6eF+A/0mwjdQlFlQX7x7meLhdpTT8wV8W -/yWqmzhSR5s9su/Gd/q4hdWBYbbiEQYUWrQiUUSLUHwyuF4bMSTHZOcL7YOjTRjt -SQO9/th8EWrs+X8qV4ElDlEEXWjLcmZ8juPfpCApkiGxmc+31W8IvvrODH/KLuJ6 -29PYVfgugaGCEgIU15xNHLgSWO7IvHHZhXb2C93ykVlabee393TeoNGpRZH7TFRA -3mZPz6OlQoApvkyrmrlSWajCLfXFlWAspy2NQAValZsaQhzRNM54DB9XV/RWoHob -26lrCL5iqTs3POXZdPbZHVzC8scugXCGUdVcVKEG/fnAuUl5HE5AVpxt4m/wPj1l -CMo4G7U7DrQo/+161VlnY6zELsJL/8MVeMIzoEfWe+Uw6uroNFNg04ZCdS6IacYc -rKwzPZJjCIQq0n/NLlJzLgQFrzf1Hy9NyHfRFcguUegAIcOv5rTBlO4P6OBSdtwl -3JgU4enrHHKkans9S2Vkt5CZ5smdjv1DUSG02QTEpGmYhuJGxTnUwymGpmPePDtD -o8xLwLrb3zi0ht34dGWenzBFX9QeEAFEAyA9SMrwPTRw6/xL5zIkgRnC1yhZSWz9 -eQYnQ83yQIQQ/mrOrTOBp+YW3AL54Kj3VgxsXRIyqjRMX9Z9FrnNB3b+eHbRbiEo -5al4U1wEgRfCu5mSqq3wDROE1Kgsv/0/Ju4NIwatfSxV4S7l2NfLhY4gbPcx+qMU -CKNnLldJBgl7HappKb62MMWN1xx32ORwfQ6Dnwe5utZiidfn27lNbGjZLKXGBoWj -C+/jyoDuxh3vvNuW6Zfca2kYhKcf78xG5sMdRBB56a5yDBdvjmM+TLhNRdfL0bch -varecctAk3wKeO56Hstg3xjmjF3ItdoCiQs3JgJPUcoACGsvnWILZKKCJoa7/Z/U -XweFl9703N9qk11Siq9NFiIuzHsauNkPprAQ8uamC1JV3dm7L1fNVOBbCvTkkZ0K -NOwIrx//OAXPyqlLR9SJ30LkgzvmbtftyYvpMIPqMVgq7qjmebe3YdonIzoAJPwq 
-ggfaub3yZNLucKwZgiOqdW280b6k8SEuDDtGYsj9v5NgwKrpaeQ2z7oZPiTUMeCJ -yNC/MQqhXC37IuKbLywRKipHrcNKpz2v3bLpmPKIztWTVGGy52+HeR8NR98LMucd -qnZsaGJyi/7ShBzPI+VR2KQU6ibIBsnjCF7hqV1wZVX+D3zSCRXsimlEncHLjDWg -9xvWNuhuyYBqSZYyFVz1N9MjY0QGH5SQWC9T359LTyyHR+E02ClCBITfBduHEqcV -yMpNUE0dXDLU3lxx5BiiswFfEnFtqzKaXNbrFWCy/Dn3QiXw/2nbpXG7ZvyVR3CW -FKEF/xtIn25zhgTL0o8eTA8sv5E25WHhP1+UKtiP3j3rWy+6alRpa+OAcDWWzEd9 -9spR9Hy7iYuWNHget5nsxodxttKduPaA0MH/aM2N1b/dU1/gC3bTX7ConfDWNTlX -Oc2ed4frD8vrkmeQ+12nkWPR5wOm9ZRoK6tJEbVBBcdZhVhzZxXC7RX+oU8Vpv3o -KlXGgMBL1VzXNJpTSzT6QK1gDNx3wt1Jh8wPsBvfkyV7/pTIRys2JVFkRN8AdG2D -lTD++e05b8Y5dRNUdC5fZFqqi1CnYXq1rRME6Tscqce/dBVLl0TGhVcdAhTlmvgO -l92H5adIPVav6hsGMtlkEj9zhp9O+OcZj2Va51ypvJoDMr8Ks1QEr/gTieRNcgtc -yDB4g+i1d0jb4JPdLuyEkj49fQgJyOgFbHS0dInMqH8b8zq2kFbt6Woj7IEktkI1 -T7JhHovq+aL22kelfiqDaLmGz8Ah5uT/TUL9eGJxfMf8xCxLt1mERbuMNjjUQpjm -jNRxzPhZwT5zrFllxw0+hN7rex6f4r1xhdWvxMVZs1pbbQMU48UFN8i7ANYx8qC/ -oHbJ//N528Q8VhF56Kr8W51bg5rhPIUcGWSHmUKhWeuXXrBSPC7+7/sXRyxfxXml -kqotKcGVd+Vmbhd1jt3oVzu1FqL0udJeEkJuWdRlPSJgb+f3nVScLDw4wVLLeM5X -2159lwvblqqtB7BiNiEDEeI2Jy4Fs8YxY2YNp2/ldpJc7jKB6VQ+H9SRn5wpQifF -TOoLO8C/ubwoNYSEO/UePuYVLDFX4KsPnZ2hY89N31RBBdhrOgi9oLNxQfOzWzpP -RMpjX9a6aXRGfqGGQtvDi2sWgfLvN8iQf/slB+z2mckdbamtbxOry1riBOVjdeaO -ksXhh6fq3wa6VFhRYkyM67zo/gdwAFDAwmcHX/UEeFqlhDqT6z2UTDCr3YBTBeca -QddvuY0tGI1ql+TQ5GCyBAXUkUR2fZEQ3aVYbOHs+YXyMH6iLfgLLo0Kuv0cgAOS -huJ62BtfhkXzBD5KlYMNBS6MBkKFwcwOQ2corgG+ViRS2IreIQlKMIAt4sn9zs1g -RsJVJ13Z4KOP5b29C/NhMQRNzSOFHFewc2uNvuNuI74IQpZFAVRwDm7kxIdOzDNu -8Mb1jzbLLsKGfqRpCxEZPV+AuIRnMdog2+sCnkMDvbdIgulHT/S4WHx290dO2HrV -UOf41Tc5QcWpDwnG2cKLMkweIdh6HeoW7y69FRDqFho4dLTHTPvc/a37t6SVFF3K -0O1vyDLK5F39GTdW6ROdWjd4IdjQ4R2qtLoxUM+gwyz9J782/AlXcSuc+8p0XnLA -nxHCwf6AmNgoDmI7a+WQiaqWlYwtVfscwnjREqvAXCtqqTkEg8wWOhVFIjWqcPwp -1fmzo+XsSDY+uoAXMtdSIyAFjhsmP5XZXASuvDR9htX/1iKI2imJGj+KilYMmbDu -wuqfLtLTytV7WOjI5W6qry5xYlYmcdM586WXEro4p4A/6droqsw58czGxPtpgS1p -k3cWM8qNQk4DflOR3deaq6nu3wUujnt1QA3jmTQMGSnVbVV/W7kWcLeglsYfr3Lq 
-9lILrj7TTaeGMsJlCN2TDN74eHCnwdjsJDkl8A+Te0TZm+HctwAIXTurvM2O9CY/ -bZc8NfQyVyVBqkzFViXrsMQ7/s5fMliIJRx8VlXAwl0Td5GgwQKP0hLILZG0wpDA -rS/u47IY0LYwXMXY254dQGXUdWtPUnkjb2EpdMbHsfX64NyBff3N7kv4wJENpktz -4XFtT8F7CCldM1DS8RcejeM4KuJx3FzQyabocKdTbJ9m6g9AUclXDz49HoeCw5+9 -nQlkzf0nlvYJNaMzsxCVpG9nZVfejFLqNilVH9TEA5wSHyRqY4WXBpI7ypUlDrQS -plFGaGHP2cX+d+36ZfQ2ZU0nizTFL0aLB5uDhKDa2BJj4Pp8aPKfhb4Ea8+dnx6S -QnSduizbVtfEQXoyLu0ICQcE/p3u+9FazKF7QEgQzkeI3MahoyZIfDK9A/5gDv1e -WEvttwvk+ax7EnWRx/W0GUiCHE/L7qV98+zErMeklcbSWwhI5CuqUfwq/tAyRcv9 -faNKX1+o+IuF7tCApQmiZ5xTqvnatM0VAi/HuIoVqtF3V/NAEKQ79rrQcXW30l48 -tGTWNo8+BGXhFYxs6O6B/RBGaMEnWXGX4Eel9k3+IngoDK0BP+fDn8Zwt2bYo9qF -Q/aFFPKItfWSXQ8t/ITtMaNRkEPgwP2O98NTh14bToOaIkwQqKIA1484ajya5dVG -HFJ1LbauzJiEKXv9xIPXxGFqcnkxPdmnnlfpitvvJyptbOvfUIXid8SpOF8za28g -HrXhw9At7t7uysI6t7KC03Q1pMuT7vH4VHyqQHPbTLnHaWhSPG6lv1XgOpax2Tsj -y8zoj80GlOEyJmCN3+aEFBXzmsadz8u3/2doqO0WdxTZS4+HIk4qOUaUk7LDxBOq -qEM8KioAAE61lDc8wTtjB4r4iSCkwOu4/zoaYoTbBRPptO1fbVDUUI9JIo8v7F4B -+i4fAZYBd4De6TUnaSVptHanh8IRI+QZh2mKFtQVI4dNg0kUFmCXR/dQD6UflZ7s -uzOzjKOLwM9AmsuFJh2k6Q7oe1dy4wsggfjlOJz+JwbItiKCx9c0Tr3hGf4M8Xay -EXVPWdGvRZDFOM/aVmYDrMErJNPxUtnVQBAzNUgnMqxdEFHhg9SkUHMai6hBBACH -NU1YO7tj4FJRLywfu+YmNJAV3rUecmytfkXbQLzXqIEE01bQliQ3e5n58CyBoPMm -bL4p0BRhEiA6hw2jex4c4u30sl8CaGnj6VRVpyTcMyNO8Ge72C58RUFSbm85vHpH -QBFlPs2eGbyCw3qODZgkXUte3CGWu1EBK9ri3h6PFgfsOz7euKrw0+pc5K4jfkgy -IGkOODvNT2rx39YPR8T2IEw3tyWKxJkuxydo9QOV3ElUPwkvhoh6My10LKQNkXNM -q+ExXJJ+iHLVHOk9e9JNI5fL1rU8I7jCL4V9RQGc6wuAwqp3BzUk/W8Pa+COTzgN -qD5am21mQa0qpb9XfNqehs8aINRhi2sTpm+g5xkZfDJxQGyI8qaMWCwLoMPe0nZU -dOOimc2QRZ215O5J4WIaAjrOmrBQqzcHvrISmJ4nL6Gfph+C0cWk7mx2QEWD3zYr -pvU92KSK5rt+bS5g+MAdrI7NNI5M7dPooFd1xSZ0ZI/UEuerH7bZjM87Oho3GKtd -fySGNv6YnGDx9/EKYRZtpxX0FhHkXOdPtDenO7ROdbqomAaQ0zVZIPXfwPYgOKrQ -vIGdscOkcChjdCQSigPYieiR9Z9Nawg5auUI20seu2/J5yRNgKsDANNV26H2lHNt -X8O9CL/trV4BELP7Qh6/2bPJ5V5cyzk6sj3quzxOvvenclfXy3KOqIiDmeR7qbWv -1uK7wgtgl0WUHwN01PSPCaUEb6jrYPJUuusm6ZDheXelfit6KtdXWUTsQxKDCIXL 
-LPpFtYPCVH08ds5+YvZ6bOKPbpSVH63ixVFlMpQl494OzjcvbGnOx5IkBXKKD2pG -ONnSnr3FeBkrI5jd3uqFVZ5gQ0I7VRfhX8y/9TmAjy1wunw8VdSwWZ/ds8ZNNUfM -TfEwr6mwU6vGquduyGDW/ExJKIXWBhN2g7kmxbp9m72FxTs9NEP+cWETkxL3L74P -bQMRKo7hLpMRgibuHJYu7/6ixoylIp9LOZcMqVLdnRAzEC94HGOSogmWVfxScmUJ -z/9iot0ikASs1V9HXf3GMtuoQ/LEFEHpr/a6BkRyt28tuC8wqtAPlotln5K9rmLL -wTpPK2cfFh/5LKWFBrw+2TUGAejpw5BRCWy4E6WguFQcKT9MD4Nbk/SKLrgUKOJt -RqxTqDF20bJOePOlVuvqjJois7OZrvyvPf7fY8enCA+ETYnH8hDAfvbz7TNo8AgB -GR5FtwV7mZPGd2391KkOdr3s/WX6qHVG91bgHqsPe45gSLgpW+u4d5Wc0zVRU+a1 -HNUl/djNi4XmGAptqsar0lYnEsHgjCJx5iz3+wuqnVuVJVzpgPeMsmFW7Rr/5arO -tSBLzcGMPBHzfA8/l0+MyG4ddZj+lciQ44vDnEGiUU3xyEoDi5e/IpmjHm+e2eSo -DI1WTPmGZ/PFEFp9DTvDt+SXmI3YjJjnGHtSOOvNMgD3a0hR8OwmtSAAerSkYq0b -NRCAoTMPQP4LezSuwsNn092nTVgGr/MFNfKwiJcIQU/ZFa3kvIa9T4aUICw6Rd9+ -p6pUz1rP+S8xx1Ipb/fHcIp4+XYSEIto17V2azovu+0tzlc6MAXetJ2Vwq0O+pZp -S9fj8mO085WuxVR3TEGQS2dKseSFNCn5FsQRiA5iAfxH6zxbTK4oLazFpT9ovO5I -3sE2KAIkZzfg5Yt9erWmI5dN4nqXj/jQuJ0HcjdNZOY+NIxeGnBC3TxbgspA87Sy -nUqIUshyNd7qTLJ6oLLNFYm5LdvYwXgLpKYq/fCIGSu1zUlLqq6YsVTgQoZ1PF+k -PfPBOTU0AMCbuUD2LUOq6Yp1H1USBkUPeKuBnyYx3flF6TNlz2fK1Bl4cbUt4WZt -9kd1i6Itvp0mBQni1Tx29KQvtdzy6ffX9aqkEjh7N6A82OwSWI1naJi++rMiW4Dq -BOeeJV1i0f1JEZndVSxHpr7GVuEWK5XDAJAeDfwp1xFPf1Ct+/rdJISiToQR2agM -SDkluezwjUb3UgELiGiXxhTNvDaMQ9sdix2hRXg0PJOrtC86HuX4PNnClIzagJEv -Vzb0pLO6jU0zkI2fcVT7ooldy03on1+3S1Enm3OHV2QApQJgcKtk4+v3qQBu80jq -kNPuUDJUrER1nxT3Ehf3cOZ3Z5p8HqTtKKC4Br8jV4Qs/kbhXKvedtjadSU7U4je -fsp7LimVozF6YA1bpitff0ejWCITA9c17A9Yv3IYQU49XLNxooJ7IlFTPmmjGdkU -V2ubbmwkAoHyy19FySqsuDUwCQ6S4Lb3Lso0kxprNP0w8QP6PVbUT3qFDdvn2hg7 -GSQBafoIQDhYG2/tOKWpayLSro/iFHSlHfoKYei2UBi5ym4qFQ0Bgc/A3i9eYXsY -3nAbi8xla5OEhoLOmxTN6DAqGAWVlKDPpYrWR6DTKjG2yONG3OY00xUhy8jAmRpO -rgxyqygHPFcBpUjTwqQE9LvIj8af2BIlI8iRcqo3vqwPRbuwTwNkKFVgyWhXoTEV -JUGcdDXHkYC1NUHBOs9lYLR20MuHaa8ue9iO7b7eI4sRxwJT8byUG8ZfoUTP6har -QMCddHAVbNKcQMbKDOb2C4UpX+cMGxuPHRCupGxX4yL+lWyuEYvl3XwE5Xoo9yjL -ZrVosHwHNRZ28/xLK1ZwFo1crsDO28s6E/hiw2wORGALvrMEpE9872RFuhUmSKG+ 
-/pTnHEEmGKY+8Y/ZqSf8KfNE5lbvH2wXon32NdX9M8lheXfYAOYXgi2Z4x00Ep9D -vRzqxJOZrIqwmwZ2ILot94oqQhki5Y2+THfUCBc7IG0IL/pxm1GaZhvJxjC5n/xn -P9eS2W1CEIJUeQtSJCjumTNFIWwvJST1AVWt5GVjqFlVT1qqciQM63Rp2OU+7DMz -/CfV9egGgMW5q1dLcLVkyhbDBrg0lqzTHs7wOot1P2DPwZK9wi2LW13q8ZGWsISB -16LjJDeQlRnUkT5tVrse8UnKBsujm9Dy+NOnkP8rK97FD4mHgGDu2+vDz2g6kiYg -oMEuUvxNCnlUM2oa/mclef7ULysMfznu8/ANkWikuEi1BDIhj5ww/8R2N0nwb8Bs -rpRBK/ZojohyGfc1K5iLN81ACHEIy7BcGpYyutkSFDVlUhLKz1+bgB8ZV7VJtFYT -diJu+5yB4PcHNQsF+9RJOQuq5qYEfVVP4JF5k6degKNPUvHqJSgDGL8WC/eiCCkq -MbR9ueZwkHW0LCZ2WGd8Xb920PAAJic8tpvDFMyogZ0OmdwZhPlI8kUfV/STqmmx -N22irC+BB4E0oShiWoUK4sqE8vloqEOLcjB4ijaCNhkBhEtA72jLCMngidMChcqq -+92tHWKKgNccDBF2yaydlreEwhB/bx7yzxA+e/kCr6tPxb3JeuOLvbwvG34D5x/5 -MjRs51RmVO/JE/vsob67PosrylzJW/NR94rDP/knxEzb183sRWsAEUffBShWFd9s -ee7X+4bHBraNz62P1CinP9I89j5+QFXHWAtf6I4cP5RLyhYnI6yFlvU8i8AM1yap -visnzlywV4zf0K9SpxqpX0a48kWUDLwdK+k/or15jziQDQ9lXTDhjuGvK8eDc39m -ZZw56RcKeu9EeD9DnAzisKEucmKMrkbMLUi+huQMuPRKlE4vIUR+/L57BR3D2fN8 -BTp2tGIidDzvYCndK58KfitmjFHaU9rI6m1UqfkZBQ2wt9JIbFBQ9LWsSKTaEs0h -usPT48tVzcxpz0u5ED9opj5XRa76YkvE8pZjHGQvYytx68M6VmjZXPJgVVByBWEc -xoXBSFBYr/OrreZludUEPbSbjWVtKpwm0V/Hv/7xO/TrWpxZR3Baz55EteTrP+ia -LPxKf2DC/p92jor0ZC3qKvXf549W4rtwU8jfX2WnN0WzH7ETjqzitjl+RFH01Ocy -Ji0xN7W+3OSKebR9xkXCnyHvnwRBmvJtaAh/8uW9KNX9FiUkABrmFzt7T6luri6B -cUnyD3hjwTQy00dEZd8XDqPE/AP2QkNtwl+FE4sAhGosrk4OEqvnv7adhvftHAY1 -2FoRlF3UGLe8WLEbvx1Gd72o9wUXPjhNf//Xd6tgB5nyzzjLOkylJdxyJip7Qu3b -8dkwO5aWtPMk/fJBd05+KAK1RjkWWvGrfm3TU17/e8OqS4NCwa44AmiR9hTOnit5 -0jTAvSidad32EYzrwlsG7bHuOgNT0+Y2OyJI7CMx5j9zdp17h3cBCzJ2eXaEWD7F -28ahJCryBSPODec2Gq1vkC/OQPvUIltfdz4P4RpksUbnZPYRq+yIa1Fnx0Gd9qea -9upeTFiQmqX7FgybQPdeGQMgLO+AaDNtgDuMmwG2tRqtxoVBA8X9xK8zvx0Nczbf -fB9CmqiD/vx8s9fI2HOVULzbpOFgZdGK4vY1G2IEvE/9hS08uoyH6uuUWDC/5fuM -lrMVUju2iWgtz74XiPRmU7h2CPsKJtPD16qKlscBEvd6Jm1F/cAbkO1MR3kZJLVO -Z3bFSbchpdedqAM4pYsLSKHVG30Hi9U9pDz3P5L0Z+wlecj8alXT9sFxwGJX9oNG -TbEzxy+9ftrTVb1C5IWFYRoi3QBrF9idebLQA69W6u/xtjLfDcP/GhElKagdyr6H 
-wvCzFQMyqDMa2soy6u7z5hjxohSYIlMDvx9SYm7WExUMzCu+gl+ZbgwQDi1ezKVf -dZ2FYTEs8L0CNIuptxVFV3IRPK8DD45thxodBsBv6+QYBKisYHzo+Cd8Yb/t0saE -ikDoLIqNRgDBjdT3M3lvFYaPEzUtKyRTC64FjY4xuSIlGFUJUA9JMGTGZWjV7oWQ -7W86VEtoPVa4WJ0dYN0pxbpseBqybMTMuHfAayv2B3R4MXMoiTmKq6uxdMKMdIvU -JUXlHmFpttAJRRG3/KiRzl/KCeaMQBxeNk2iPdCSM2ZQJiLUgYmvcqblqrOYtdmW -sqkYLBrJKf4wXxqEDVYvW6/1cfOxLuKSUU9zwXmMpyY4NZmNPicNt7VmAN1n21he -S30GZoo69B8sK0pQJVL4G16yn/H+I/eqqkMkmqITpaUmMY90EaxhN7172EheTbkt -OJdUVqEsHnd1qMPvMral4ybsnBiVJCJX9D17AHOKuAu2LYBy6WgH+akZEWY7KBy+ -yhwtlqBrHAfRw83LXCKavpUZ+U+KfXcmdiewdzLDlYryRKamCMVbuoFNLLX/EnDU -eEz3fDocdTT30ckju+/5LS2O4Puqqf27aE2+aWqfpfdueLCfS5TiGYlyYzMsaZim -X14b45BZd9JnPlVbs1DWrQvw2Z0L9VjGPRlbKoyb+m+uso5SUf12YVV4+foGKn+k -m2l1PkVPCpvFG+CAWLq1S5rV6SgyWa7li4oLlJrAYPB2266ZtNz+zK4pvlIcMNGo -YqWsDe91NNeOik90V4W3b17/SbA3kEBFSLgyTmyW/IIoXtyDSsXKWRFt/3DwlMwb -8yKkGLelowUoCi18q9TabGq6AF4M7+daBWkJ270ikxmwTLLEESm7p7T5ngSa9lXG -pqXqZribQOL5vcY4gVyPnk2m5cI1/7PlqrT2Kn4R9g1M2A+ONrAYmx5ztz7csckb -5nZvcDJF0XFgVkOAi4c64delaeKm4Ks04rjU4ZfOzNIRoa7wMXeO0tRMni6Zr7VM -zVjWDfkUnV76MAnVNcQg00nvSsDsiTE06OIk5ksHc8GRV9Pz9Rngw/YhdbVz/bp9 -1PfDhknB56HELIRmiwWuJf1DVGO5lMg3cx3LISEE1Jz34b8dV1jEaOGYQgSYqv+Z -QuxxVftBRjwbF+dMc7uEVPWyBiWE+xoyioOXeFsAkFNrnGADvyki1qopQPMOll5I -fqbjR6UwDLntTWTvU162VdtrI1dEZPsBpySFuO7o9bmPgF+sQGYQpvrzqdhr8APM -CFZVyUqsmp6PyPRmLxB3nCVk7vIv19i+Bjt3iwLwH/5dzc9AaLnleIVwWbKvz1ch -FHca5OqqSU6hux9nPtl1ES/4Ysex4BKZ868pgFdzHRnssTKvkWEpbH+6FlOYS7i4 -CHEwyDJ5yUpwMmCEWI4V4PvLkCZZZfWau1lTgF4gKlUVELhKjUJV//wunR9fa6B7 -2Ujpcpx4WzoV6IacU/A/POc5K3LAFL8TNq2NMmLp7NNZWNt1ldREMLuJK4bTF5+p -AJu+gnNUWMGfSQNr1cURMvQFBZqcmwgkqz3/x1nwh1FNiiUCMQgsPxXmFFY/j9rn -/KpvLw5CgxBohPT/H7ESvTJqUg0ojbKBT1wJhwseSH0XfC27SScnNe5wLglxiIQC -Nqz/erFNTQSn5i0/1MqUViANl/0Tz/BVrDYu0rkr+BmLkrBhM7ehoJ2rk9nI+VKi -7FEigsjcUmY+AoD3Bnmfwc1Y4ts6W7vI9Acz8Gu8uREUNUrkzhqT23zuZFK6ny8e -GR/X+zsonScEgj39kXavhTfFZIFDqhsR4NlBEQDDT/NizQuLDb5KGVqWLeqbW/Nh -TNrdv82e2SJN0W2Kt9KX9u0rr0UIFrpJ4zSM2ZDRNpKv8S5t7gTsU5z4QVad0Ef7 
-G0FP2mp5Eu193XyAlLSuapscqBGuScMjqdUMyeePlAxkC/P3APsddAGNKA0TxU7H -/pm0+XvQvl1LPqyovkSP5k4e0K1wblKsRIye6ev+Riey8ZqQh+x8BwuP4E8e98yn -4y3Nh09GQ28pj/bTGgktHOqoB6TWNuAtICIG26yLL9GpJpRKK4uNo83JhIpsKYCk -6KRA61MfsJP/72kW0acvNdFxyt6GLoUllPmQN7GKsomGG/wZGrUoKZKnUNCU9PlT -b3oKPjwntp3Ula8bm/qD4+FKlDRtfcrbHERJo5uBTvMmtocpgoYDS6ICVPr26cWE -CjiVH7hFGXG3tX9tPbbYArg+Rn+YrA8DH4NQdBFvs+0T7tWD5XNNcW8WyBbxxs4h -ifDDz1OeT7qy9+u94/faPjlDt8kiRx9dxWpDJ6l+Y/+QzA/l4PSTfOpdrkwgwg4l -0fFxI9yeoP1q6gLlNdixcvUwK5mY6saqSkeiPl1FDV/j3YRVq6q8MWYuMUzeLIdL -VOTuaOYHAhm6e9J93ZC7Nx+kQA6y6CRvM0DUFUI02kQ9rNAozSru9mRiGmkAOE2E -CXcKkllAWVHSxRt5EF8Qp84IpHGXhctImwwK2KAzIrURw/HuoqHDhtjgTX56Cexq -d+QceF2f83a2THJyG+FSy8YdMRXmJcBcJk22mHKbN2GoxnswSlvHXAlDyvylrgBe -KU+qsOzf9CV403K1X72DJNqhHvUbI/oV5SHKKuzolk6jlw93vkbhG9ELyr/Akc2I -jpd8rTpcOo5ZsAAW1Zjey2LSljqT37vtHxevlXpwuMiKmcTeG8GixP11FYaO//cn -moJuZkNNTkBtjTXFei7VTrokqtzY5y+PxEhXMtYX5DNxfYkVGZUzobTNRYZm4jhk -9Ow1aI204A5xaEIgBmQEjae09IKXTJFlbM+iDeuQwqLcif4RyFe37uS5kl7ROLrs -24YcJC7wPpgu+RZ7clTGBZwDkLi5Ch7l6czpAquYtZVdnffpeXJl9l3P4fzKs3T1 -5vHStL86MbL9Uzd6+FuSMR8JS+b6aF9qzu1K5jSmod7uvwYPNRzJUPgZ53GbDNv7 -k5Clb8orien4/0qW57A9oCPz0dunySgIK1SQ2h//Q020zWQ86S3m0/xcpK5hCOYf -uuYtrPZ8TNNAlzUcXe+8kI/PtxB0T00TCNtksXBDKkqKp9stNJ9SyCGXrOz9rsDJ -9xfwVfLoTbjKvQxGNcg08QBijtUmSAaIPdYLm427ptflUo2Qda7g0H2mG0T8TYrS -8yb53ajDUv6oFb2X7hs8dmvu24l8XOqejK3ZvVobn8wppXzRGm3t5S2FYLKXu8Bm -R6FKg7XXxME1KfrMsKfeGglv/XrK8Hwa2VQt4BqsEnhI8cOsw7pYPhx2PUbJ3Aqk -JlxWGeYFWlLc8Nim51O7h3ymLIBTlMHPrQYEL0dBWJ+Rgg4Y9NmgkZwYGhto7fk2 -0MUeinUUYTCelCxIxmESdvYqA4IybFnm0Nr0VLEtnBT37+rty+haGKCxDzPFyNdK -xV4Py/VO0LR0e4qp/yn5naLtsMqL96wTtC/CUYlu6fp9MczwXFWVy/9c2cmIJYwm -/8YMzKwJIBypZMy8tChnScZLTeotoRrg836pghgXmn3SKvPBbnoaRQxzkyGC/7Vw -pY0NpAw3+Qw/0+dxTdTWS+fIKfTdpyQfP1VU6qfshkvJRn/5fanveFZvhi73Lkw7 -Khh2XMzL4hHZNKQ3Msy8daJTiyRVB1BjwCOmSYEuWfsdj9F8Tg1KYWvcEUV52CV5 -B+BsP91tiM/0Slkl3CuXKioOL5W9qXbLSF3ERX180ayla/tG69KwmLnc5dHF/4yU -3tlAg3tun50qjd5eFgmVChloTL0P5XaV7UVzyJbxe3yvZK9sujROy5X2bduunrQ/ 
-w1a6v6Rc8kTZnXzOLJKcaIWPFLoXVbgA7GCZ++Wxozw//LvqMTuijVcL82jH1Slg -u/TeQDOnr84qS5rhwyzczfsYFNUqYLfcPkzrqykaWxPC6viUygqPgKIS9BL2uX3K -nXkK+dHTZSr/8zn+tirg04YJjgAMpNsJr0EINQ/nGF+xtVmmhmEMvz+AOaTETqyp -d2QehC6Wzc2h2uvKGAA7ZEArTXM7JahbH7rEmvf+O/EmJAjbpsAW7kg59xbobB6H -nUTo76mde2Av0gyujejHehqAf/+e+/xFej5suHO41Z4baUZ3uTk9d0f+AqNJV2FK -XJD1HtOdecyzulN5z5bw1BSkgzhwHbZZhW52nWX58dEDE72iV0qpau5kQDw0MJcV -d/xQ1Uarm/JVv0WYfF5ZE+wXw4Yzp5E13aYVdMzaQapi6IAzdmp7kArcIB+fPvW6 -Z8uDs/zo6aqJmsyoLWDc04Q26b9FrLGJvIaIv878WhIrwjIld82JlUoGix73gz0+ -xBMynpdDej6cPEzTh8FHIXDaU7dwddZhk+Ulxl48YOukUHN/0sEYfXCwxxvH9AAx -w6ymgw/6tByOjItiry1HZLkCmhIunNY/d7IE9wgFteaMbEdwxVZchqsPX1yHw+Uh -YxjBHL15F3pocXUTC4p+xL1gygoPvU8gLI89eWgIkR38qXnz5NQQPNv5+2/Bmt16 -JZlNI2YItNrS/NAcIu/ay+i6O9iltc3T8u0LVgqg/naMoT3QjLo2PFQZp9jpZ+AB -aRSMV4wnipz+Xum5uRP1yy2MiCcezhIsoPn9Ig3A2j2FBuJKqTVeSngR9GLTsZeL -EWaWSNvOHxHkg1jkKiNpyI4A2e+lWj63cyJQFwta9CEVNC9HJ+52Y8UrKIpD5U+C -pWHYSDRNnVEhIXLdgiFs0QHUNuannULPmYNlw3GnOAK6XQcJ8lrVJ34KOKmKWanf -CRh2q/g/9qLBlTazvFbJ0btMjfEMHr6OF07bCSGWaqIrY+4+Swf2JvXFvJMSRdaP -NXbLLYKdjqWwvp3eX4xx9qRZ3u6kW3/WbEmdnjEEStErx0I0Oki/a16rE+2jQfLG -E/SqcwP4nJcvS+umH3CHncSH9AdKTybc2FmU7232b1OkYTHg9c1VUY3Fv5DA3YOs -xwARpaQxVhUxkcg5dXiC0V8A2g80jajKQZSr61kTUzKtZVOR38fDwC1jkLbU4HpM -lKHEikoiYjn/CYAdHonsWmeQEyiv/jrN236PGDvhQRzi78YykrvwKlM3Mg52vW9o -HA0qBHiMiImxldw3M5hXpddDRn2Vh+FbWxw8wwLLA6XsZifjcCWhEpROLJI1odQI -uEh6oZVbEhFcyZiLled+2kt+Y6QchwwoQQf3e9TE2nH2MD4qwjFcZivnJ487ejjP -uX1cQsSv7+YxRejFtkVz5zELL00ylveAzbgXEC1+84j9rbskjqfvceBFwy+4iG1W -XYxBznYdoLZwixneZ/JWFaePJkqd1RSKjAxskR/2DXiUmjI5eH820u1oMPWzDXsx -kpN8AmmccoftaLw/t1yHViwM5jgMG8i9c0Hl2qKSkfJHvy12FkiHyP6hqwYynib0 -zf6Jp3pfQq/yaF6YzjGX1vm+RnS9Wtyg6aw9G4ZrtefKlg5t43/vMUlXbTUxTqlt -2QHeHFWtYD5LYqVY3h0uaMQnV+Lk1TyiPeh60fTA8tfNbOwjzWbZ4Q0y2CnnDGWN -uFW3Vy/zhTh6j2AkmAurXaAYQKXPsQG98jgagVEKPrlXUC37lKtQ3Q== +U2FsdGVkX1986PbRMb2EokSrLE9lJ2+nW9OfuyA0vNn39kfHerFqT6axJCldzuZS +6cIbHi/WZtTpwjxUUKChgjSrLtZ9o4IlDPBn5qxMMxtVLhfJmVDwOvUy9NvAtHJz 
+tfIOOZvvZzDTJ7ewkywkyxk94JseMdn5/GhiHinpOin29cNYZ6cuxeYE5Lihz9tt +3vP/S211Oi1LGYjd5kfuoSrfr/7Pkdhd/nkyiL/r7yOtPtmSeU++D5VQsbLzYfRz +Umo983TNZJ25w3FMsrbZWR/1EPXd5Dv4+S+FAmZES8YSi4lwmnFbHnnJhB9XBV8R +K/+puWCgOH8UiAdymvaaAlG2rRzu8jQtitrOnSbONOCJn0+Fh/wW53JVsGdLjUS6 +a2ZKP+g8G3KR1aJkPG2NdjG+4IzoCaa5G9/YPP+tZI+6rKrPm3piabtxxerp+N4d +fzwWrmz8CL1ICnU/0ySzIGdKYDnF6oB57vRVWanUYknAFq9s+tofGnt0c/T3X9V5 +aOKuX+7XbLEapuvsR4ghd7uYhi6eh6s8vmc9gFYJo39tcU92M2w+8bz51CskwXDM +WEcYLnue9/yUK3fdE6CMTbFtIhlXDw2IAA82rracXoCEPZFtDSvROG0W5WLMb52J +xLE6DJ4nPAPYAfgcj7xzRzir7WRgclrWrnDXCCXlDcxXO38BgZNJ9Cu0+f6Ys3dI +1yyAagxixTJw1u6Etk9ictbr/QQYWhQwqXPkPSrelPbJ5chQrdoxN2MrPuRUm4Ui +QNa0ug6eV0bsolaqlwCbbxoqmZlf9Aga1ePoFhMicj3Jzy/8A8NIx1LhiZuLnqiV +QRhnlIVUL1fD3HojXxqVyQVM8pqAb22uzdS881gomH6BEK7B+v7gcYKGcglCMaFW +fUqx0EyHIzBTGtwldrymbshmLgcYlfuYl40eYCF9l0PDN9/azw4xstv91VF85ZR9 +5lZ88Q6/3rMBfDS8ZxwEXDIoJ78giMqjHReaQgUtVUzEgXJyTkXCH8GS5S9Ct7YB +09Gf/e4IU5EYEWO+Y6vCXyIpY23cE/mzBLTDichT4L64chc4qUX7ogr7YvEvU9LS +Ga1OOaS4mJHqmZUahGa3aDsCx5Aozs3R1Js5Em83Rr6lK0fQVevCqVagbcIrVUls +vOnugf+0wAo3YaOeJypT5JkH4JlwPO5Gfm9YJ6rvvQTugkwp6BfxnRt91oJzPoOU +LgzbLYcYnersdpXoQIvnUPsF7cPxdY5+rS/cllSx+dnoHzqaNrqOkfhn7PScIerT +fGOWrPd5gH4uOKshc/bTybp4lSgbBEQGjD3HUjuFxyfbt63MsqxB58BV25tzmabG +VJq3Z2HbD8xlKWhoplFR/QW7RpQ7yyuzyRbF9a2M4dwSP51XkNMzA30OjvqzWrUu +6s0vDppVM8iTT9XE6SyGSnKEEOIm9XzXEsVD1ZlE38QJxYo1kl6DMPRiDq+okaWl +Kbv0693AZulL2hHXtQvMufxFNNAfoO5jk8Jr6rRVXMpsRRneYn53cyAzMuk17SQD +X4LfN78mOTc/6qadfv3t7ZNCBeT9pEYWfqhed2hk3CzvfdqceX2dimTNySxcNMDu +ukrG6vJfaO16HuVnXDT/V+WDBmEDhaeadrjDS8u0AcBGbtGxjXIHqoMJf8sxEqDV +cRQuAKaRwQcAVbUoF8pwWLvEDBpw47kZCVm6202FwI8DQngWVHlJDCvDoU86n9ks +9WUSuHJSWXoYurLdXU+0HQPGcwEbjvn9GXK8UyPlF3CZmz9RJT5Wr4Wu4p1ZACqD +4QMW8vvST4yuT3mZEndqrSNCvWf1M24jhap4HY6eKSTdHaEAEhIpnbcaR+pBIiH8 +QiK/hTRsTqV5cFQYN3x3hIQQHGIFXgutSmjuIWQQ8kPVize0qE5qdgzWmCIQtqp+ +OVZ1sux46edT2sAN2cwuL4b2sc8G3yMrEA4L6imf8Ea4mRQEQvf4RD5A9Eq36z0K +SJzvva9HHZK7NIMsY4yRt1GWTyNyzApJ4dywzDc6cLvf42O/NSlHJ0NOANDUQ4eS 
+mbQT+oZHwCWA13W4XAlaLesakfc2E8KFMyIv95j8BeySp8gnrGz51wGMIeLeuDRP +g2CfvXZzPfea7jagkqfMJ1+q7pI/ItrQ7ccrIwfg4gQL7gEE4trGiOvw9RjTC+Lc +wm7Wj6bEVXzONR2izvFnF+PAmdPfV6xMFrGpHaQzUcvqQ5bHIgNLl11IIas2tZH5 +RK0c2K2COaX5ZmRflPUK32vCkRgCJ0x5b61X66q3J6XNxOb91RAU7BKhft9Ud+tn +gT7RWFZNQ11nfui7kplDaLEmQac4dIcYw69n3QobaQgGEzJyNqdKHFW9dJYRRyy5 +cKuHQUl4xvY/AA+/bL1HqXJFAobLQ0O/eginE0lG0qpqOuERizIUsV5ZDYLv4nWi +6qpxpIzkTiipNfyU2jNTZnSebSKlDJTFwsXM4RDHcecqFTRYwtnGQJYzaUnrRGPW +zkUy39VyT7BepcpPVbmi3PSW3LeB0FmIJ3dMihgiAHAa9fuAFItX16VnNuXuTvYf +ylHa04LNjl1iaqSo7vjejLeNrbUplSKAuhvlFVi1PolGjglX95qMyh4KiI/UYCE8 +7YngRWqYWEjlUK94UzYrCBI/snlwXUoMLfGXzFbbPnccPR0q6AbSf6bjNGns6EH7 +5349eOHjB9kSQsNU2viDxX6TIGi4T6X3Yu9NeE4mbIaU/nC2NjkYMfKDzer2G/aK +xQhC5P1k9LVKvtj4VTQqe+/sfCMvy5hJGvmKJfmT6MzQ2wRtqO+A4xDguNcPc/ti +uduNCcxhs0d4UmbchoSmE03C+oQ+Ql1Hj8OAOMoPeCisBuVhaFNQ7g68t6xVuS0d +YLlqtSVbE12W5mzZ3CRslKDOLzRWFW6vp7AY+eO1vXPg19B0BVLwuSG8t45AIzNd +GkkYiNABfZc7oj4OLi4ONxPzrPvNGuN8tytFtXbL9mAa4v0DagUu2O+4fdnGsShY +FrriLGry1GCd0wWECuZ4TeFB6+qYJs41Ksqe2aK5w4njZABKv4IrqMX3qdzGpzu9 +xS+ob7gzunScYlkV4epfAJIZXZYtf1qiHkK1V3ButA3rlQT0vK5c5UNLJRr+0ebc +DJtlHUaCixQ582mm4rjbu7yOeYek+Cu5Y/MVbHAcGJ6QC6wm5FQSE05pcD54JdZa +9LF6raw+APkNanBW8hJNSFf/ZN17Lf9bkACeq8TKlF6feH6mHOKokxCbQMTeJVLU +/4Z2eRhUajN0mXePITbxiDAvCWImsx0qD8BIX5CStGI0LGK1eAEbQWs3PKrctXm2 +l3RxNMhskAa7KX4NZr7tsd45oa5znMwCKxsLCqLEY8G13fAt6PfFeMWTC2AkrbPo +tRVVFcy7/VONbBl9+OGZjD2ZeOYKy8rqNLxKjwpxjzi0cQWx20NUHPjc+E6m5eiz +pSJoxvydLCyNwgoL5RkjZWHURfIaurb8dQx/08nQeiEtHC8RRxNcrE2nF8u3redw +14LTQNZkx1XLgWxFt/KoCjd/GEDxN9Z9sB9HNYIKxq4RA/bx398SPRYMe7NBJw4j +vshpelYHXb2Mq8jQQBehGEV0cTtr8yHekP2og4EevSXN8bcGZ6+kxADjaixjvg1W +uQL0omvE2rBGxkC5zADmAn4QAbbVhwu3xHU7/1fKFTTjCEumFzY7rvaWz2/Unvb+ +xd+FpwNH+rAzyXB93hJ5ZjmQkkzdOm0YbD/xS0wrPBeaziG4JXAIORSGGUL4QwRx +O0ae7N+cPzQbZAGty7YMo7twPY5IzeE0cF+7MDPirEI6oLfQAyqA84jLoFasQ0nE +aOKE63P92nQY5dxmsWP1YYTiGdRW/vUyC+6lhoufu/KB5gXO+n/HV0Iot6p7dX0X +FfCjBoT8Gco1zFgoa7OuLW5Z0ZaNLBTeYp2j519T0CTKFlXBN2Fd1Cj9hufELgXJ 
+mMa3Ykey0VO1N/Yv2CeVUR9KlKBT49Ax2EcJmNizIpdcdMJ0oXRSoBjHOPxHwMEG +BCyCh/BhSVOjp4mqkvhVmXHLJu0OV+QeF2A7pKQx3eVCQx1eEkO7mB0JUHFQkxUy +I10dWM77g/MBvOqFNvk8EI3ifeC11l+BXfq7FrX/Ne/MupgJPTllQBEVMGv0+LUE +UXshO4iCaR4UqGz4IN4TLSmVWo+FGGFvfFTd9CocwAf36OGf4p0lMYmceXCL0Ojm +Zr3JMa4XEblDwFQcfjyXxuFkzqGaRjCNn8hXvgabyyyywCBTKL5PLuGYNFsbjE8r +sCdQ3ggh8hdcJAHWMUKvj5PoPOlrHg0ZJLaYEXunpl/VpmJH0gGwP87KG/3MbDX9 +pijHjT0ba58uE2mFDkAz8ZDykdWKwpfoO3wVhZkvsHffxGkTM7hcRdPU9H+aTkdu +wgs3oVAI1MvsvyhWycNXG/Hl+KwgwhJDRbH58kKzjJApfkR72nPmHPeZJd0Ovt7L +FxgOGB5K4MewoJCMSz9uXALStvv182kyj2izsTCH9EcQjJMhYHcXSRra6x6HNWtS +KSqajU19bnWJb1kduxLY0HycVXhema9nvUVt2exmyL0q+0loDqh7MCZqtI0oMD11 +jlcYl/Krr2dSzun5rlNh1Q4ufCvFttwUiQvPakTqYvrGK6pU20LHTFm+AxrDZI2M +SIYT2vGLj1hZEU5pWpw+hiFFDsgWQ0ui/Gu9tNzzwmprtBsw//qmBTmVnXWCa1Sn +60bj9/8zGVTHN3iBFr190W5PsNh867kgX8D+sspSb//JCSCm9H87GsC94zjyGL2G +jlvM+Flmwm8DhWfa1tH8KEoKz9c1YPj0N07NNRy/XlyHP3t3srrdpiizTj3HCmxp +0mxyhaa8zsoMmWN+FabVGHeyv5j3faGPgmSpqc6q7hSl0CivAmUyPm51kfyjsAxZ +oDuP0ijIZtTUTQcW6V2yaMthc9lQbpwX/DfjK0VAwCeG2sQs8fPtUfFgbPa9NQAJ +lCB0r6s7B+ZEtlY7bkg4iOav82/RoXlPLDAKVeLRPYR2/v1hvLlFn5BEuJgnxBGB +b9yrIMb/qu4a45l3gXRfaPWSZqQo9/FKpD5DBWTVUsoayvMelA4KwYltFwinsD3b +SO0towg3JVQLMLCs3xKqVAj9A9Dfnvlia2draldJggb9gAQ+YA+2kxCYl9MeJWwL +sxSXdwO+3zfKnwaoGt1MadXUIldPS+ocWNoQq7Yk2aHZufxawpsF/5TfXAPb5J+r +5pgxLx83gqIFbrOlrJjrh7BRKEbEYaVUO6S0HdGAbkCQXGOi7XAP+EWze0Jht5As +sopeONoBxE5wk6pj/glcr46q0SJ72uqSsf/+nEvD2QmT6o+/fJKqrwSYo5X3K3pS +OCgeunKkqKvWWpXSH5pcy3cVVVjui5dgyBfYsDJtGvLnsk+gCudmkpJGuBiAr0Mp +amowujtzaYjYqsqjdAUj3L3ib55EUqHhCMo6JnykBJyiwXT4u5GSVVDWw7sb5cV6 +B9xUFXsrnT52+WdHaujJYNlFo+eV1dm0EGhWh65tVgwfKHl40nFjYY77Mk2/aTBi +RqAssGcn+ODSuDZuO4Le0HcH5VcdaLQt9Y6uV02fPs2D9Kjj5SszkCnjUAqYanio +L1naFHEer40zQ5hCyKio+Z1AdIWEYRxYHQKHy8ED2zTAjWjk2/eldUvCGC688fF8 +n0GmwJHZ34a5buSZ9Z0rYQnqdTxIsKCWvlmaonvC5QpiJveIhH/WKNQ2Hhjyt7I1 +VaQjnwKOC7qFjJyW+kRNF/gTjw+AiojWeaYsb2AVmKJJtepd1XvznRSdeuV0VAvF +oN365AHY2NYjosMBZIUbom1tma6HLb945PC1WE5SN2VJcp6kMBoRCMkparz8g0aP 
+Rgm64ecXkTnf4QuMIWsTB1PEeS1ZymfngxUPgDj7ltEpuJ1lU/kTxI6o78w+0JqC +ww/UnY+3c2ZjpFCKdIOB4b/SvpwVrO8vYh8i/75DS5J6Ouva/ea8HGx/I2dDDJbo +DqHDtA9ggJIBz3Z/T15ySBFVeosDWELVNwfF5hSI2J78b9j+4xDy3htkGtFBKhFg +mdJIz9x+N+1UdTjodc1o84fNi3BLGYgUQvK95UrEMeU+rDuDhoWH1kWQvjXqfcFO +DVNxTmtnKVG8/l/LDyarDLGmW/mBmf0pUYfC2+C3qX/5fxH2CLMhSGbrNsrfyugf +MhCwthOI8NX473fHvIc24WqdxK2yYl5NYfR0TGablw42JdUsnmf/30lOm9jIZfqU +EzA4kVni0RkfTzrttGnhcpbpud+a0JMeLT0eLi6hlL6CEO9c6xpvjp+nDj5kE9tU +Yc/Qyw5CvLhsuFWc3uRd2XBF3S0XYsPcQCRi+jyp7S444vr5aOOFwQ89QkT9Wxzw +AB9qH+oZ695AXhLnQV93v46LwxopmYJ2krF/YHqst9lT9DepvOa+Oh9CVpMmOwAv +u4XYrPSOnCq4pCOrd2ZfBofdpYl9jvVgEjXB+53TraThfNXTBjphv4Z9o9+hBVnH +CmBL43t0e6FKYZV2BbZUR0uBpTE2ri/Tw17ZXJnY6s0seDXKXPImiQiSoo1fQFEB +8VQMDSXFcMg9r0Ru8unF9C79gITfa7l17cIx1G2bYWCPEOggn5srcU8xB2tD6ywp +Cfcx1ztwbxMrpCaXbg4yTyd1rzha/LkcuoSU2Y/FbfjKgqafWTHsHAYyeazJTKC6 +JXl8mXT6jAO7+AXQ68dhyUjWs4pKMZ/rSUV+c3FHfDOzgDUHy3K9rDQUfILVv1n9 +HDNYoTgAjFA8OIF4OQIReHqudgGltj0M+V3EtU9yeaWbfiCRTcma08sAhCvTU7G+ +yR11hcgkOpgj0OupYQ34iRWabvDSXcqG1pzB0kz+MWJOSG69t8k1RjBdneTW9i6H +TkF7tMRil2Lftyx7ZckscC53ICCbrrNJkBzZw7SGnoDJNwKfXgnJ8l1gCJBNNncs +SWI6ke27Jr8EDk99vXBsZ1Vql9TWeFAz7PqlmUoWwcWQVtlUZO0kCCGXBFsz0oeY +m8kWpHNnh6GXG8+Q5vOwV+pLPbdaB+/qufiFLKdCDsUmErb9bz8bhVb0foStESFq +HpaH1B0y6fHbOwrTPt//4uaEIKQBvcKnl1EYHscPWHwZ5LB4QDIfaiCSP3GVAlcO +WPNMdCuucmcYv2vxKmgrGzrUnjYFc9pYuShCmZCjkb0VfDROjx1l/j4dDOsCGPpv +tUJx0HsFpIK+2dl3DCN0JNXBJ6PPreHqqXnq2kYkwYWZjPYnCnUKAq9A4eTuxmvz +bQWGLASSdXrNjNbTAQi8dQvPLDrSK+Ao3c3Ji7UT4sBH3CfawZhgM0HzNum0T3NB +5OHZ3YBvyYY8PNCrDipquhiiH2T21X26FKTGvh9lSBFvF2QOSgYHJ+uEo9X69BQ4 +jpqady1CIycSR6nd+ux2RJZVoQ7m/r8jQ+gfawd08zZdTcI1GMpoh2/gRwIJ6815 +GEduBBlu1gYLkM5XGmGvgCLnP0iNiFQ+UC9E3gnPpMx/NWV2eiVnI6M3DaRsNEt9 +rIhR3ll719lj+IdXybaVInxv6KwT7VWTVYmXf/DOoGplsC3sUQcQ9mxczGIEFKXj +eompVnbLjjLrB4XRRMQ/N+fCrdp+yplBE35RUUEQuYXPelAniO7zNRF6h988PK1L +QNJJxSzCveKxnYkUZbAAUJk0wXliCW38JlGvRsT/3LBZkd8BN8Cd2Hp457Gg1Xsp +YNBV6FLiSMJtxaJSojYlYkV+KvjwaAGMw80WQ7klAyA77DmBRlyDLorug64QVWDC 
+ooyJjkRcdZYe1Y+oUY1yREYWVA94F7zeoPCgWJlF1oZ5W6Ampc6gBx77yTZPz+WU +BX8GFYOP0347Dh2+OLEIALq2itQsw1/Wgb+tUa30xcUMjWdvadT2YYVGkTm5ffwS +bpYMTzhed+Slh4pJd+I86HBSWIqzi8qpN9G/G/X1iMA5ZLAFoK666lmInaFvsi1x +Bd0hoa0VXLsXKITJRXoKgom5E0A/0UXslreA4EdqAoS7ce4dcF0GbbmNC/abY4AZ +cMWde9XqUhW4qnl+A1CAUX11lwXiJjY/wfmosIneHGGIcKOYqdjVQuF/sZgsirM4 +HneU5zGVrxW9nlvceUJjEwJFvXVsIUBds9LVY/GjD5eBrd+waGO5KziAn5oAGbjv +cRVo/bqS72JSpx1vdVGDghoFJpLhfy62hInWE0ST1Ggmv4EYaxplp+KkT092F+BK +d2q4KMJ+dgIZ/zBCtB4OsXbpVLZLUO1cB4N6J4w+8gbKLG5RhUNIRycXJF3p0Mf0 +FjVmI1lCDi4m0I2+BfkUkrFgHoDo/DUsgSffzlvspq+keYLuzlUeZGbfU6QQuKFU +8yC7HdDEc1Y/TylbtqdCWslZisHGxsDaOWE/qubLA/Fb01V3OWk5f7ROfIenyfgF +HWA5wcRtUhhPyu1gENQDI1tXAN0quOgbrO0gjApPL42CR5VSUXmBmsNACtm8qkeV +Whvjem4ei8bbW+tUkNGidJMkArBBgISudFFLaIOymX+RV5JVQXSN+1O332VGT9RM +YPfRziBSfMgO+7OXEWN3t6p93KOZtZxoX098PYLFxNwSKAXMHU8Msscz1AkJbRTC +uZw9lAbWQd2CdJW8Us8gegujVftF2HCyj5XYeWzZvQUDklb0GhURzh/Thx9HbuWs +p14rat/v3NoHaE2WOhJkj4BuU3H4JizEMq1wMFksNKbtsQa9ms0UKH8jmMnZod0M +xTIOoelcOObeEoEztCX8lHVXYIkmaVPqOU8EPgfLk/O5HZKtDHuieR2K/2PDcLyK +uMM7hoPTZI8LTXw09bhML9dgJJY0xKycJbWcBsI29VDvwaEk7pztk1RcVFU4m3nz +SWnhinsCRkLZyy3FXWtNRaLUAJWHfCZdjrTM1OuJio1Jk1jkm/aGAESOt9F7++Qv +316WOZZ1cdLq5HqNlnmvZswM5xyM3tDt0Wjnjx93G4m8aDtG1f8+5+Q8tzeegmAz +9ksO7mKenJtX+9vCIkyit/6SWa+EZmwNVfpY/4n/xexsN74bSuZIwDEDYfTcCqbd +1iTiAMX1Kt02XhrLCW+MoZWWy2FXyTrR6JEp4pwvwzeDBNZpZrvM0Lti0M+cpVeF +7jK1EAUXskVL+wGTsc19O9Yg0VVK+o1h7GVRpogfHcPHPa+3558U7yb1S87p3Huy +0g8FX0O7AlUN8AQ4slZm+eXPOUmW7DAZ09RZwrUKih6tozpI+i8cImPo1WTQxmPX +WbLu1SbX4cX7FRLWci2puYaBsZifaasY0J6K6rkqzbMc1onB81QLH8T32VrdEdrT +a0lQXPbGMI3MLIAMxAyLHmkP/el60ZAQyqOHK89D3+fBdJL49cuPq66qdRmYkdqK +wJBqtJBjFMAh+WNMMzezV+fCI3+fSpZgsBlLqdUq/COz/8PdHpFFimkcjM+V5nE4 +BR/t3eioEfU1XjUZua2xrVKw3B4q7UpnKvbFQVKnzKOT9mEta01I0HjhLAApuAPP +G8ytLhf/SEwm0hrxP1XFN2+e2WXSOvXmnvVAjIaOxpw88yZAbKRhq4Vhdx179G01 +aXec3xWjrVzVqwKGbqCsyOukqmj/K2zu8R/eDeRDauCv/6J/JzNoFlBOp953va9B +PoAADmGLkUQJlWWtj4KKJTZxQIMhnsFs2FuZCY+7WGuZAVWxkIc0P3+SBlwIOKBI 
+Ob6KxQAI6K2NgDoLnZhJ5DsD5rDM15u7C3uWm7igjUVf1IOkCT5m9GZY3ZLpbLbB +3YMkOvCAR56FntmL8BtnRUUJ9cDZSvtaJhjq9UX5KCf5S3zN0+GhzLRRRuaTWT0Y +zF8DX8VFU8RybIssW2gM5DWz+k8UyhFmfpXObwxjNEm0ssoj3IHG4J1j2ssnPiso +WFO2W/U/dC4oc85mo4mvEcbAV2QxlYsSLpIdYzRfiQQGt7BmFOaE0saIqIxSD3lJ +6FHJsK2PA33uxRRzwgP13IAziOLdE6jAf2RGzjg7SrGc/kId90Kfn3C+oDAqOAkB +SikAA6SudIwEgwQiIA6XEgFEZNf6yFj7MbABgpD6pzJZiRx0b2AL1UzJQMnV6qPU +Y2WduJZIKmnwMYkEhVEVjUVQPbvdfoepHVoW1U+MrrgxyCFmQpT+GBDOJ5S4rzQ9 +kL6h9NJBWs+IRFbQDhWj3fXwhgBDxJggmL2tHerhEl2i+MVpEmgsLkj0ODGwxz43 +uBI9mrGbSf38rKIXQgInaqe3qQRV+gydpoatuOH6JOREURHUJSv7vt+glm7vUTOX +55JxxUorIEqDPD/AQeUxFNp68V6eoY3yirVMwOVG8VGW01t5KpFZulnlNzKKjGJY +v7Mj+Du3gheEpM8/cdEGML0SB7yhFiQfEXWWZNuyDaqLJTVqgX0bm3LB+Ir0rGxG +YTwtiOAZRCrBdXWTllTEf0XOiWLEIwbXSOicLvKRXNVE1NBLSjTVZBjN6IaXyIKb +vYB+NaAJEPGs4BqzpVrQFtQpoYho21k2s0WuJGtHo9leofy1LMwtkDjbtifIWXE3 +devuZ1CtpXKfit1lrX3g8dy0lxKJHJyBhMA9yS6aN++kecb8FdgXQIaWJsxP3ZsN +VXs7fc6w3nLIkYozGHpCB6GELolyQMfDDSt/yDSTcD2oRwtiR9MfjX902MrgVIdk +lVdcm7VvPqX5/Mvmouh/KEu9oAbZDxNbkCOJs487qJQ6p8ZzXx0vh7Co8eu3XAy4 +gFf2L4DI7O0q3iy1ObWZB+3vVvDzUZ8Jx8H0pRRHKSf+xito9XMuN2DpDrCzugEn +9x58sVbwnfdp+m2KjA24iLYLOQ7usb37jaju22Fr/nunb3wEFfBmsJ/cJzk0b6nD +Byilc46rD435al0fUAPeOZ5RqNzAefJth6jiFiPe09hi1bwUJrMNI6yekvsMQI2C ++VtW4n96Q1DVMCX91A9IYuXYpGCOBp+FACXMO8nEmCnGcm3z0HPJ7hIFV04/dBnP +RvCqNZGxjyT2o60Hq/rx0Vjdd9YRXEuT3ETlqIGgGag0wx7xWxPsCWEPxc0POZEw +I1Jq31EBXjzqRMsNoqxYkiu5yyLiFFrJ07l43qWDnHVEQiXSXlF1carexMrKvrr5 +HkCve9glK5Dbyd4RljTxinMIkXhC/IiU4g3SKOqDjYP/E0GOCqRx67S8bZ/iZ+Pr +yRaaPTVrcb72Wf8PxWDNIxl9DvM55vU0mL+/GJKu4+xZga8Gyr381fQ6nrqoRA4m +Ke9KE6bK+2N6RclMenfPHWeunGzHTVqYK8G+nXxPqEjDuKFMZmLdH8VSvb5ZrWnw ++Jxx726BOegl0F4E/9/qY0rqZQm2cMaBdrkkaIWjVe5BdZJfKyEk3thDkt8Dpzz7 +mWX/j+8KtIy61yAhevg7EMQ5LKqpUgbIqumtiyWYqkzm2gGiSM932velx1etu1C9 +6w0LzBibNuwNljpK8Jw1GWHTlAGm3hk32Zvpn4/wYEj8CERiNWV61aqZWZvUk8e+ +/oXOoL6c07EokMaBuCWuxo+/tMTEwP2P7Pj95SAN68lvu5kxkngVI6Pc8nlLT0Ld +X/dGtqpLzRvnd+yKP+XZmjNcKqzIMEN223h9HP3obrTSKGeHrsnNJCk4jwmfL0oE 
+LMK8Y16UN6wZmA5be6yA+aDmYV9RC5VxkgmpqJOz4mgxWwMCbZ3aKMf6+XRZgxS0 +IDmhNL930KuaNbdH1QSmc7qNNWMUz5Gvg2FKnsdBqw7RGxdtzBonQS26kxEmzNfJ +ghrEhC/plvsU/94zygto8whgSKoiQM3IS7U1FN5PPUjOsYqlpQxKmRtmgTajL+2s +MmhB0g/glCqxeK7g4OZcevyiUZ6JijCP48KRvQOnF12TE2aR1k8yeJyqk5nYixpu +9ff+cagN6bungJXymZK7kzccAF/hE/haV/dekliZjS/BBMSVdjapjGbQoQnn9LyI +30kXV5FCuiB4xwr5JG/ZAg+NaE0Te5zXbvzeKSsw4VAvuW8GIzmKGCtIyfGMuzCZ +MTxB7a99BsD2dJ5wbSS6j8JyYf24Jr7GzFoFadSlVvQIW/UR08/iekanboMu5lk+ +o/b+Pad8JCbXzHnItMbq/m8HI0ev9ZKE6EuZ/0Xne6tf6VtwZdpVdExTkFkVx41F +nWPCg6axY7ukmcPihvtZ8sDXAfL/1saVMHfaR0gfckFqqClLNruDn4b5hTRgfwgI +CT5phFswKva9uEHXJTWrXbjeujcFbgRTW7/qQkdmkODZr4E+UzJVxupax7CCurvP +4KLBUdOJeFWXtnKSlEhpEnK1zwTiS44yex7teLtOP04Yn2PlkeGw0F9MpovRjd16 +di4odTWjE2ht4QhjKyuOWQ0Kn9qRNQIQG6xAMLuSpuD6HilZ3k5ReoX1s3p0uPCT +6rc+yQNNKVLIfgwbeJuGBM3M45GJgdqOCOEQCAAyW67cWK9F7hPDzhbJxYshAuU2 +aFW3ZiWDV1qgc76k2RcQnjN1N/2hax8ZUOJ7IIQCHp2goyGYRWuaQcSg+wMvJdmP +r6+2gsB/0thTP3zqDBVXlx2xQoPxUAliUwZVH4Qv9VPHe98XTRhvuZ9lRflEn6C6 +iDG/Y4jTI7df5IM4hJkp60+S9S4HhVhIZk/vxdN8HHZ0/Skoz3/yMLMM+8UHQQ7V +uCOfNlnF0HMm1nL0sXBnmlgcPpYZ6gKV88pvS02J1nAYAC6KJuhSJudQPcQvUgvh +PmFiFnx0CuaK1EUhOhpuESAk99Yxeik+iPJlEdKv7AHMFGsF/vmmpd5C8uVZuitz +LVpXpo0Fr6LiLfYjV/Hmv8EEsBORBFIEpHw4DR0lAuqspbsTR42w1mYx61G9RKC7 +ANQPA9MB2C/4ZP0qmQBHCf5mD/9LHhicrDatmUaIu/a5/aMOjT4kkZE4NFPKqJ+9 +KWFYyRGFlDOtE4PCA1ydtCaIP4SAKdnZIW59f9MPyXS2XxEv+DkXBrz3P6HNuSEl +ZY7Mrn84t4sovlKgWNlWoKr+B4iQ/aZWkICuK6tLaBE0A0n2SNtL0drb3tIlzNlu +t9oi1lp+U6aha0gbi3HjHArCJGa5SBxIGIjRd2RiKh1vRMJcX2UgUdYLa5w7GSa+ +3nvYjZwb4bb0NeGV4xDtmh5u4FpX7d746+9B+ZIJdgxGOo4MESGUjfnHP5UQpcOo +id5Kv3VR8d1w5tOJQ+6S0z88eMMkJbekhaR0ITqxiD2LUSpMoJS6UjVppz7y24U6 +u0krSJgIpVjQdB65lJ9xxkrW/w11m4w1ngK5nETSSKXiw8jlrLHbm9N6D2+zVx7d +VGIwl+npyIZi3gXklpp/NMIZJjDBkWRxyRo4kSbvc8HHq8m1zHdjUGs2Ngt71De4 +oAyN8Aibx5MAFRpHQNLZTJDLRB4Ro0QiMXEFniXxO9Fl26KjZHyigzoyH8hfCQms +LRCFVarkPAhK7aGyvoqOnExIIqwROa82DGdWzyUbenwRVJkmFFJSe3NYO7KboGNr +iXJs7EgnIp00nWz8Pl0jOBGdFP8e9Pjt2Cjxphg2WbWLxwC11eea1nimiqTIDUA1 
+35lzIFI+qvdQ7R0cGPJDflLeDuXBNuqmKLpr9RFXeFpYDkGwB+RIB7tOUzDskKqb +iCCJdagossRAHsfpOFU5kcmPXgi2R3IxkKT7D+nE1wVHbafuIOiHuF9MhihPPf/w +E0jSzpzsCh70Z76sZeOXwKMIYeqB+Zr56dawA+widA14x+CQ/SHfiWMtlJy7yUbq +du9uezQwO6Gg8H880OLAqYbLjSDRyL5YyWM1QmQ9btprxBWkmZK2Vyo+kot7+d8B +T3jeyeFLkYeZ/W0cwmf3WEf56NFyEKLG2p4Zw+uqLL1K9LcJycSXU/hDB02shh+c +w4H1rJwZL+CYka0ojJIWimvlHsT+EnUE+y6nwEAgXDcrH3cxHQzIBTBk4zKcMK8F +hZtRLI1l0M69dJGxmjSRR0N4n4lTGsHiESIXM+AjjmPOca/UP2OUx6XBE3Bq+xBu +UUpqax0ea1lzzoht6FNdbETzWtPaMd9FeSVwipuLuZJHPb72KBVThraCMLyZ3zGq +piakpfy1qRzvpDJJj1OOzp4p2FlIRefl3oisxFCYymunHWndTQADi+1cbMYo8d2H +CvsRGsPNl+CNRM3Bv5ZakhJb0edS6g7VeLe+Ow/dMsAYs5MC3+6WcRDcaDMo7rQr +yJkSzpqByFm36l++RlDtOfbdbGHY4L0uShmEZ27awpeB8uufgqIPqXEaLQVAJYQN +or9aIEA1d50m/MXlCLsdUnpDGCoVhUTGPDf4VAAlkdkaWq6pslIpzQVyDyA/qvT7 +oo5MHzK/a7I7pprryPjF/CcUUR+BivIEn6viUzhKfBUjbY02AZxFp6hJaXrJ7Omh +J2Clxbqa5U0ZduBV7XeJTeacWAZ+GuA2su+9fVXG+vo1WDuIbOcLTj7cFlfwh8Ed +HC6RoJ6TbRlH2achWLWOny9xGXsYzticMLR9EW4lAatXiggxmAA3o+LFmnOR3vR7 +U4MoGYmF5JazD9EQwU/rqOXaDawYY/uJORQ4b0RjVysYUQTGTVePG90mz++KrC99 +L5zARP+hJX3EwWO5Uam9cAFk4TQWnF3+2c8IIdmE4bx7v8PR+iD17KPvizzre8W/ +NbTbPm44fQtRjWIaes5wAthCpvN5XO8rEzWk+9/O67nt0dne2LtzwOboJBTakGHk +2Hq49Wbd673gj5F34RhiwfujFAShyYhAzJyyiwStnOR+sa4yyP7hPDg+2KQAH6rX +ShXsHFz1v9Ng3SgSi5sS+pE/KqzFNn7M4Pd9UnNxByR+uIGHbnd+AfIgOlhKBBoc +DSodFPx8nz+VGLNduY/dnR8G21xbuULrq9aIPbsGis1PoNJcA3cg3AMrIXhyxi3T +RS+azmw0dltctVEMDpg7pnkMCNS64y15evpjJgdjTj5QChU/Liwbix1iY6phnMFe +mf4b6TbKajmPoKZnC9eZc1Ik+wa2lLx8wlhfzjNMNhqHJpM3pQH/EG4znLKSXhOG +e55sdzwPxLSutKCj6SL66578mgotLieN717LmvPZuYJmSmENECfqgv9uktaxyMma +uLvpmQrALvM2hdt1jh7vxDHBpJzJXRIPkWOchE9DttfiG0n7tD6xAGWM6eCQ906w +cHFlprLy/xNtkepQFByTHrmDUCvBMcDy8Rbu9G5/NqtXbalXmCHtEEPKlPx1Vs0a +Aw8dmY1HNLfkOS/Vwkzrf2WQAtwvbkMxa/Ja3AF5sS83wYOjQYL04YYz9z1d+GRN +Kmydy0cuLuoa3+u2sjh/WxORG2ZD46wX1QTLJCom61aKB8gbvYFAKT63O82G0Yx9 +5Gm0SQb8Q3DLRpuvmah4UeLZNXy3LuNW2KTuKm9pO/a+ogHglW1q0jWW9id3X0vQ +f2uFkyD0LD2Y+st0YPaXFaBVSshVJAdpHVVAnXioEKM4hc9RAJ+8I0HxXP4Bsdit 
+gh3g7QaE9oRhluG0sA2ryQj5Mct0639KrrbEmic9uzVSif+vEkLTcjwKrrN6wH1I +oUbRCy06vj65u92OiU33OJxFpQNnFKAT9CYi1P/AGX8urnItEPH6PDOpqkPbVOj8 +sGFXyBk2gbVc4hGiRnh708ABSp6djx6WvHJiuc0n5cPmcRObCswx4IK6H0aVN/lc +fc/Nn/DELgii5KZ5JtLjIY7+kMZEq0Ga30l5W+RrNHhw+wrRYafeabdHSjL/3DXt +wEN51ZgFRoSw2OlLVz1iXcWPKLc0tqs5ln5ucYTj1hmff1tqGF691Mbg2DgDtKqr +tgWd3TgP9eF6HUL2Ly0BNMOhrqmVffFwUcZb5aU0jlafUCec6wQmQXsMj28gRAv0 +1N1zllXI+/InxaS6p2ZqGS7HhpWguJ8maXZpNjdwOWe+4tKT7bHsNgi90Fz8IJJY +Pv5ELHGgiHdrh0NOwKCiEa7ycQ2gSuvrFB2zPymznDOW38h+kEz7Lkt4N1NMWIBR ++Lf2GMxS2Wqr2Swoc+TtyNgDNgNQvOT/lQg6/f21+8BxU3P2duyKQ5IPsG5hkaDr +T6F3+baDjXVgfiAc6Z3pJyKG3hNzO97izTMbaY2dFZlNTDAIH7NG0gINZzEc6VNE +3ccqMDRvXkHVJwUschMhvW/GHuZf0ry9hcOpsHBd+sLkbY5ya1JV6AJ59wX/86P/ +GcXUXXBbJPxnN4wjTSvX94ApfjSJmMfPAkeLNktrpqvf49kSjmSmSzVeGHvO9yjI +k9dT6eu6pioHz+S5D5PI1zG1VhmMAvnOyefMPEFlkr1u9l7c++y9lQ4Jixsek1bC +kQoz6ikEN/tYsZe1GT3eTrZKXgpSX99uRFrdwIcsCRC9IMEfRo8y/Zy/DkGRiYkB +B5mgFgvfmWqjUVRI/vNCIhgabfx7yvMgcCc6U1lxwLcsZZKkj7zaajGukHhIbKy+ +EZAKJwTcLE/jkvuuyvGbGvuvqEI3+P7wXudFgMLc8vI6u6Lg07xW0Yyk3LKSglYn +N8D9gzVoOqglGHBqypgiNOtns+xPsm4p3xin5UFmysb3cIkHYcoDiP4Rj5S+rrfn +7Mq25ljKn93xKtQJzOLSF0dwWN+7NGAKP7yFBfyTXVp6uzJNcjHUfhcarfS/rvmT +pwEEzM7S5+Sv8+veOgudcSvF917baai/3niqqSc6tHEQm1nQNg5Mw9Iy2qIxiPkB +RKwCTKHBd9VG+H8ezqVjV2hEn9qo0u4ll/MT1wAoB4YIdWQou4VSAwrx81rf7QK9 +uppxed5GxyaLjnfOpxan1623gpyk+HFw8ezF2FpVXLU0hb3FBlUjTAbEBiM/yEfk +mJa1rZL7VBA4v5e1uUwO77P6ibaFMkd3wgfJrq93jTP1QEtXBnMpuGrQXSEmEy9Z +rjX7zGPrC32wYCTTmbfAwEetd2XtcrgupfbWm7fRR0ZXKKh7DK1Q4HpYZyd8f8ev +I29GLssQjiyTlj8q2Z4/MbvOq/nlgda0w3rrbHikxUlY63eVuuG2wKuRo1JVF+BI +BDwhlk6awAvK+OyEUyw7UTlDu10IEzaaN3oB6MbDnSD72e6Gd9vv4kja31KLm4hC +JckyLk1ZEsnzlP3GE01PtxMdjt/HXUGRLwEwaLxL550d252G/hzkObnrg+VhnV7O +qz1d5lU/pnDqBernB9Kwbzi1Tj2zXkort2i1q1t6Lf3ZX5M8htM1osAzbOIllMYa +/1LCED8yM2ZwOQYsY1R5fAzn0vk06c+FEvwxbBOywjpJaQSys2b9sxYMUEPi+xmX +xlDPPI3cDCeBh4GrDORPQwzlecd0pEG4+drBJsm2/sQfpz1xI/HekRpmidxCmrpc +9Zo7nNV6taI5Ob35P+Yez1rMw5lHcm0AZJRD3bk95ApLX0g3/n9FzVNWa9x66uGq 
+vQP9YH3kpvOEOpsKRCxIngaBUvH545XiQW5TAGTS5RnEdEL5jktVI25v635fA+Ae +Jy571OspVpxiJFiV2kux+b2aCrGQFMpGRDHY9Jrf7eNz3NMtzAtkwjUI0VYl1zlV +lDjdScpuYM8l94m9sUzipjzUktVQzqhO9Thjp5UQUDe30bTkH3YZ4A== diff --git a/scripts/mount-shr.sh.encrypt b/scripts/mount-shr.sh.encrypt index bf6f0a02c..c5ed09837 100644 --- a/scripts/mount-shr.sh.encrypt +++ b/scripts/mount-shr.sh.encrypt @@ -1,71 +1,71 @@ -U2FsdGVkX1/HExlufrZrqJxtCMZM2UWp8ls9HQt6jqnVlh1yFSm7EoWl1wJJ+hN/ -ejTev2LerCr27Jz/SLcpCscFle3SqKNcR/eioUtlqVMK9rHpTgnQe70rPw6M8x1M -w3ulxtMjNgV4a7LEspcqf6ZByP0sxjSxE08D69XZNMpnzfZukjbjjB5nBst+pxNK -oH5AYeQl65QcMQHZM5oG7DGvZtz331asBqboV9WnRTR2B0ExGKPqyGDm/P1WvRo5 -nAYsCM05dQYs7NiFutfBD6PjCKkAqTakBQbl3UGxdLAfEnoukZwb7fG+2+VsY47I -1eJJsj+TmllNYCtWxujPVURbuEeexOVWUYUIGcwNwLoMF3YIwZfm3TX5xeMxfLUH -lsR+yC8KI305Z3jN68eZivnmjyZO+0nMLUg9/v1wSC4lsNRO9vM/3zUUqNGNuMKM -mc2Oulnfm5q63FH+bZfv7/wzocRzIceiEffr7/VGSZJKFG+TeXM4gZQHNRA8axzq -EIFdsKVjoBrn3tqZGdimsTyeD4IJZMllzIbLXphcUd8h2+xRPA83weETPugFIwPM -cKpRuowZBCbo2UN3MynHJzwCxeZ+Hh5G6LAG4HCEgLwnPt5/HEomjGEQc99HuHIM -Oi2Y6LsWX5TEqyS2Q53LcoiFsdXQAoYp5CJU6boa8NMHz4bkUUMxSTbPePdJk649 -F5YE3wCuldZA5SdO5jpAeh3dsWV9Qn455IGYl9SZKuL2D9XelR5L8PZLLsbVY2PB -P/aPJUWh/H60ntjnzE/l/jyRraiegHqGXTIUXjGopZqjAtng4hC9awswALm2nd1x -TPHHf6DH9pnEQ0Jn9GREnolTinDIYfgKPXrymoMV0fIQxlNg49x3lxnSffmpdxMX -RnohAWzcP0OGjdAkNi13keiTo2akbowTqjgI/+ziC+za4ZUwdxtf2MO9Xg9vowFl -3KeU78sg9ABxihL/W+19aQiIiMeFV1Rp3xSmYB0AfwJlY/gJnrP47PAo/+tBKTmu -gHR5F3JmBd7teWoSigup7dz4Wwo6TbtWuJXEqux6WRxo0wOHe8eRjCt30BjIlLhh -CzdpKmxDgQkTHy/oDxC+pqNgh2QjOXvSqy58+3ywXEZDPIJL1dTNIFzNQuhWbrO/ -inVdUZ33AJsbUgfaMplJ8w/ti3o8uqVM2kezSJcDk7OkTirSw3J41N9VEcoKrkov -kzjDbC9BdnKsJ4wp3zZYzOu8DAmw1N4Io/C6tT23zUOyRxqf/zF3faVgjXMcbWLS -M4ax/JhSvOsl5mCNLC4SHfJwZHxwcMEgIVdpJhM6UgaA2ITVUz8yjXyCyZVDqS73 -HtPAxeywTJes++jQN+6rmjZ9xcmXXyJxnxmHoRSuWtTgtm0A6WonFGFMUyrWlO+/ -XZDGhMivylUlkXE6m/tJnMHvG1FMzi2q1iy8VCf9DihGWhw4EfNb6K8COwyKTnE9 -xyuA4xy+i9IopAr9YYyX8btFjiZGmfvctpSoNVxRXUP5lhtTbztIRJTuALeILJlY 
-cGgyE3Uu0DhwZ4Ra/ADuIFCZVLJ9tdeXauwvbnjXJHma5BjAiwMJMVpAfhX8V8ov -hgF7jMo5sUIxklCjRnug29MEQ5tPJ1v4LodnbKCdcN4aQgGptpGTa3u+yT8d2NUc -Xr8O4KGoGEqWQAx8jlnwQ23HOKgfRC/LXxdMfQARerydUe4F36aM7d0mSRpyiyIB -UJauYnFBrxvlxTNAj0ZA6LXcoUp0wyvcVLCL5AG61X499UvYXhHC6dDOoJfcDpww -rmWG8/zpq0O9De7lh/4a/NwjS31kHMSaUT807ajd//t9WkCSe/qblWAfnHCMzlhz -nhEasSo2rwEzHWvrTinBhgeun15nBrMsuekoJsGGL5HDV3b3xKVgMQAfMPBRqqjo -Hv/o1UDu52HVqvbnEphvmiObhlzU7Xr2yV4BdXhBrG1TsYejquIbMsOnbsRCrzPq -evuvw3DmFP7kHHcogFR00kqSh2rdM15MYm9V03EydlmijmxlaYv9D/xXOocG+5Qm -xW4qkgb0Ar9kZCwHyK+c8xxbSoe3jlgnXnEev9cKfEzux2clNTp52yVf4YFy3SKf -jQ2D8HK8YasctfSXOQZXfjLU6Kp0TBJgjVECwyrv5IGjxfhQDL4vH1lzIQWthD02 -ocohE6H0mVWToMinuCvppiHnigxp/rsmsJ9x8yGxACOLHmbsI0JpCgcuZUKb3jG7 -j4j56fXxNv3h+rPQ+AqwIdyq6UfhG6pej/PHAAuA7a4oB2eb6r9jCiqGxpxOpZrr -0o96OgVT8QGeqRfRCr7qRr1B5mM1t/GCD6ApNvi3AjO1k6L91gi+KpZy+ZlhOgIJ -eJ3bACGWkrwJjS5bcGXehuzLfzLq6hkOBzUYTcWDsXT7mfglCGqPf6dylF8MbpUf -jUsm0A5fHaNmN7yTJNgWgTZYQPJolfgkj8UmgM72zsrHKaVsYItgd5kcaVAYl51e -OPkRpJg4TpylvXmdg+Bd66gSO2WeeiQ5Oqy3fb8JAl+R0m+rJAR+t37zOEgIdHf2 -IKCE0EZWm/n978dvqD/UjHHUfgt0bT77lm7jmVLl8VSDXLcMqY2gYWcfD2A/EKt/ -LyYoiufX/XjoRBzfIYeBx+eSVbjsDQj2hkwLglFPrIVLe9TOwXkbQMMjCoalUSpa -+KQfg3AgIsKaa0ri082degNr8pT/k21GIL7xXNuaSnFQd6dAR2i+wUWtAb481e7q -OHx10F7DNgl9V6CJ6h3Ttacg8bderWDDF3Uvm7vI2syD9/tcXjuK1jBh6BiGa0hJ -lKdSAp1I53CWRzaR7wZz/ZVxzg9ynfLj0TjD5gkcnGPWa4YW3TcgS0yV9JnDo64r -cj1oJKdhHVTtFGw07VKkhtj+G83NVsBZ2EDRxYvDr9xXHLeX6kk+B5Sy8WnjGpup -4OFl8sZ7bInSBLaqVRiX92+vfU+f/yNPT7l1z2E/mxYOHZYOTDgyxwo0FkD1a2me -+9f3TKuCtjd0yJ8Wg/Jj4PRzEeutIjsOSQbfRI8VrncFn17qDjafcvrWiwrrRk8g -15HI5jHATDyPDDHFLqKLwAKhL6Uo7SOyP2bSiVYCSxPMCL2R2lzc72dfktTKIkI6 -zVwmVmKpmvPGuC+zxqLsc9ypAWzS3VNpxhprtTigkYeVi5p4/mj7ablylAp2cb2q -jXknMLumo7zmPuirB4yJykUimF8oa5QLCBfKgMxkRR2ID1BFD1GGp6n9hiO+SpIR -stsVt504PECmXFjQs9sXuejHuCPz+wJCkzHCxoiUaXmcuLpmFXTiKcebaBlSyGUn -2l7cVVS6aUX6qd5Tvgp+fP72JS1X5OS1OAimqcP/+OTJcE1wlUDpRrUd+oHwU7HY -oWT6Xg9u3Q1wCPd0v3ex78Z8RahhYLUYO2S1m7w6IQ1mq2I82CsTBGGO+SiMVDdz 
-61zl9atrfNTH47WNP9/8Wg9FzU8OjRkAkKsv8elnVdmBIUsr2FxFLFM3LuuphGFj -MTytcyg6Ff3P/vpObB4vSl+yFI8CPnPI1EKqSakUOOkASnSqHdXwGRrL82SECX0B -uddS532hNR2VRprkg4K4IAcmzA24NP+AjfOnk81sVEGW3+2Qd908l6XP4ystJ5Rs -OEwHo9/kdGlYVLiNvnkScjlDolXuTycCQFp4jHgXTphuGyi9GOhAlCN1Wvc5drbM -QnBSMXpfOyhGLuvv2DP4JabpwYi+6Ub24kS+H0tUrDW/n1EOSoGTW0keJ6fEcTwS -vHtsmUpymRchj9b5DT+37/BMzny/7zhqP+U44A9AGv0HdKxo05u5pkI/QQXSDTDg -S5bjOf2Crl+ITHml+SYDSqGQr9S/ii53mn3/9IjRRIHZJz0PC8vGL6fxDCkX26NY -BHQ9BUeObkCLiKaFrK7ppBtuM7OGNvBNpFF9/yDb0yIsq4vXLDuXY5tHOlVjqhM6 -Gzw2UKqvlqLgZz7bCH3SR18cXdV3GLR8Zi24wZhMaIaQb3z2TdlkaZQb+5G7MSzz -zQ7/eFkAmn3kUcxpz/cB9AG6/0yLitegh8YJBQenLAZMiDtxNs5mOk8NIfAQ2CeD -E4zYQJM31i2h2ELF+pVjcFP0d7RXQZX9z+ni8ID3fkqgBOezjIUbdqipf9HpnRRZ -/7k11CVTlpAa4OZS25fTLFbOOUWF/fLGTVN8ltAmZpYis2a8f2sUz+P91KXP4X0Y -/1S9w9EeDCjTB2CCAm+vk1BmPTMppg9KWYtsOvfnj8n6Z8U9zPeRpPcrhiLpuxIp -P8M2/MX1L9qxXtG9r7BROhBv6vf+LFoXOiujoSRszDT7RoqIw7+trYsVYdV6oH1g -PZJ7hKF37udRyd8dKTiv14JIzYupt8xfleiodcXKy17nm8DHd3qajM67JBXTNGwE -vG423n1n9g8Ml3sS4XBPoDew+xSZqqcXkvWnd+FrjAbU57YsoAjpVbrI05DfQrWz -FONOkPc6DdilSd6zIF+taMRzcfNlgRDp +U2FsdGVkX1/97joDHr2PfbtM8603L6QRNH7YOTwyx6+WEU5Emp9YwRLWVFdZPxrn +GfDQXpdmqZ0shGjAPjfhA7ZMVz3qmWZHMr2eCMZhFkoBT4PjBYf/GVfW751LYT29 +7EbpJZYIkXE45VWEw+79zetmPr8xxQ0A7Or1prUAqzUcjSkPvO4iUSvPpn7GcubI +3Pcmls6W+Jd3KrT3POaIPFQsFKmp1lBMQGVEk3/4GGP3+aDhcDqjuV5BmqFs55Ue +YhE7Rcc4nGeCPbKTYe0ouU5VR3WVNJX8+UIvzL0mH+01Ctjn3PcRODzklTtqOeHu +VBGHwAH+NRo36iMw9wYZJZkT/ay8UlVySvKrAQ+13lxpKA64Eo/p20rv3ht84QI6 +lc8JgNzJ3RKo9YCK+iz3UPfXqPXen1q3vto36mCdvP0pv/Epz6PtQaGxXO4gEFlL +jmP6IfGurMa7xiluPxjXFhhb/9I0VDTFk2dEsalu3qSsvabRjJtL0bSW0jWFMTAp ++Q7JH6WjzSJQ29qCR/NmiPjFbl3AYIcQjtMJdziVOksZl43IRM1v6XOVfvo4jwOt +C1iWfqIIcLzsW7ccFqDAoGFwDWBdqenL0KFhi0I5Goi/HJb1pJyYylD5PChOv8rL +OnXH4jDvKEeNnYv06rmtR8rEzngDLt6x0wKQx+EXmEfXKiYbl7Dfp2tFLbVdSNcz +j6uXgIFaRqOHzYD8S0J6FpRz4dTMK5hvKxDfhzg3x91alQ0zQr8+kzU6kOWq5FF2 +2WfEhFYKZTGnL+E3wgkeN4an6gd1mp2AGJm/aTDBIZaHe75uUpfg1Uqm1e/GhNNs 
+wsJlNZDEB5FjAi+11mrGUUuRTYSVLBVmkYvMbT2nG2QiUuC1tAsH/Cv7X9aEXAsT +oYNn4pAlXmHllws12e1RWaOuxkaX8R0rzfG+dA1DVXzzDNZkNBS06Oddrlp8X7Op +Beez9+PpvMzL3X2vuMiTJYLQi2kk/wATh3DHMP5W+9vHxGYcx7O9G1foFiQZsy0y +sDaH53ge6KNFnS9wBACE0R9vEps7oruTCvxAmvFXv8bXtJx+JmDoFcpg8aN8dqvM +B0hZJjtzpdPz0y/f5cXlkhgGXRORwZir/okdg8ffs5WqLFHZO/MWYdWH5/ws+DFs +eMVlxafcmTvgtMZjRNO0Z1s0TMCwDCBy+mv3YbyFLzBcRZFBuStHfdc3QvOQNJer +J09UpweP5N7aQnivgbAstAMBIHR6WfRBmASkDaN/YBKF4PiPWgOokEywiVXAF4Iz +ph2Z23q6b+fTC3tBYRdPqm55cx/IkLMLJmmOcGPCBY4fv7V9QpPuYZvPu75esCC3 +vzPNxKKmn4r6dwwwP/R9xiwAIPS8ZE0rtD1xzi7KECsp2AMGFtg8zu+Ve4xjJUxZ +HHCcydRxlb76rz6UNHMalw1ywtx7afJdzoAwQzef0tmnfAI1w5QIi5bB9fo7VeZg +SVWVemnTamFm0SZcgZmow7k+FiFsZAxwe1JE0FxKblgpdBqWEUYkfosYsmll6OsD +HOj64MsW4+X25ygJdBkBjFsY3xa56cnzAyIkuZ7j0ScuNimQzNpdt3PT/khY6CY4 +vSkIja4TapNAoTwlTwLYBVLzS0pT78VS8JsX0cRFNCsISLatGD42APeCcGDzXB+5 +fo0+nJsjI+hLTMG1gpkGpZugChtoHMmRBMcHsblPh1QPsyD3RqStpzNQQcM42Xw1 ++ZrVxoeePj2boMmu5R2l3Rx4O7bBDPbD4SvPSbxX6s7dXeu7Hlox18LMA6lhMaU2 +8DmtcdUWRVTNsi7KifAh168qH13M9xBRUXW5n3K65W+v1Fp87mqp7sbXFRXRK7Gw +OBTScWf99JRVzJiu77DsxJqilLo72KCSbVuZkYQtDMnagHukPWplomgmGkaQf1QH +bAMAlcFMm8Oy6EDQjsAjtxj4hoFMYilN6svuau4UwImfy2Dw7glRppYJOtsa/lgY +89K5JntejJbBxObvOhatH2Lybi6xOZIBDueYzrefXJxb9l2Ul7U3AUhFlcjCv0uc +fTwJrRzVpzOqkzjWOZpS3KSSu/+5LCN4+75Y7/2A2OicpuD8dAVGeSPBxyiMziVN +aBt00GKdLe6WnyofDTnEmEQ+LLdS9fyqPjBquHTlZyF9BRwU1xM4hd5IrNlmusq5 +57MqDaL9RRMgB8+bx9TTSUQQRAeSdumk/ekUykMFG/aItjFNR8PEth340Cz0x/ZZ +n7xgo6mam6yTnYVcA0lBfhRd6X5OzZEkj+y9zQmhKUBe8NKI+bdxEuy1McdCeRb+ +XRI2SBiW/k4PiTZLDwrP2UmeR58fO4OHguyO0oPfnTmbYunqKNg27TxuN6R/MqBn +YDV/gS1RYO+lWSfll2cQO6cWSBCgJVw3Z+XAlGGRxiK5pMHictO8Tq2O0w8AFWzm +85CT7Xut9fJafdom6h6lK299UKmVJXzqesw5keYUv4yI4wAgdjJsQ1450jMT4CkK +WknpIHbwcloCM20OL9hVhTWtUbUKQGWwnauRabLDPSoh582JjCAAxi4keSeWyah+ +z4TGFi/ft+ZAFxhmspSjILUye7GtWJgXg/GFK0G2YoB01bXcINduXVbLfT6FvWOn +NzXgzEQF9/BfEkBQYKeAbUyOP5izaV7fJ+UPlub1Cl5/v96zstDfKie+OHHKELWA +3WZbxcRQdJKere+ELXCFRxCEqnPXY34ZhwAiPYswKRBPIkBbTrtdRYaiEO4XuDtu 
+g6hPhgQRvG66usxHnpGxPONtDLN54uQbkUMnLYV1mNQXCtaFekBH4J/qZGI9rpMt +7Q8bE8PLCX6aHs4+v2IqMaEj8LhxvyXu0Pewo9TrGAnL6TBg2rA6mB6UyccR3fh4 +pYaLq+P8oUCKm/ZzI9k6zFHZIsLsBtIFx+KTZMYbpX4TUGa+0Xd9DgEJ7wH83dTS +hdoJzL3dEXwfnGeqVVhEYLUi9NXhEEEMBEyPoH3XLRLXXHZEPdhDSg3kB4md/Q/a +Xf151A6UsETsRdZaJBNK4RsjdcIkheCsE3rAuRiErm8rAASThxoAJ0VWfq96lMar +D/OkvVXgEYVDRc04DyH4W2DQTqvi+37hjxxOoxUVK4sQYR4wkJHYhgvyYCzi6ju9 +1+ZBexKErbnfuOgDpBeUsyzLKrSuCFMz36RpslgBGe7GeKRfUjL9ehaBIhsDntbJ +z7aIzsnUls/yCA2O7Cc8tTvteZy98HW9krgqmD/QR/EKNq09kw5EiKlq/dUpSgsm +Ql4x0N0nFirqnocRKwHWJ6eQokD120/OjB6m0unYp4l4fRll+fjOeZOGkS1NM9nN +OSXe9vM9IGR7ZPrdeIUJjFKQVYzDJNe5FhbHqfaMCzbo30haEfXqqKY7EhiUQ80V +5Jbqu47byjFmJ7mtDoQKvpMXCH8edxaZqZU8sR1zROjLUo56UJa3g/lAmXrle9mD +b5lphZBOuYuzFIWov6DEUoTmUuyvvfMJLMtPBdo4bHsZl/XfQzFyDmpZqyM1OHGU +TgUGfmBDbCXYsXWCE6i/l3eTrn21LXRHwavaeuex0NNrT6S6fQdSbh0kTGiXpgG8 +oOPXO37BZkogXLGgHnPlpBaTtIRbKiq1fkgdNNec3aNhsI4oUCQycrL0tjiynwEd +GRCHsNdMpzLBN10xc9zOmn+Jfth+qw12COSCL+3OtduKOkFOTr9GtPU3KtPww8Xw +/8XZ7vQsyArdluRD9SHDcv2M4EIZPI/LqRSxvGFLzsu55W0/byW72dygv7dRSvd5 +geZXPO0WYDbYdJX3Ix5oCIdjWXDoU3MY6/GFEFQULdRgvGmPN2gl33IkBCcHOJxQ +JVglnmqb4W8v+veUbXgYOVCrWcRSMC7xhzMkxmt92sxg56K+C0JnLBx3848CzFLx +O3WlizRLYwV5sYx/fHvPiNgzq9BpCKozkbuh01wKo7n7Hs8TOBftzxtT97lQqSWJ +NdzZTGw4M3Vx5Qh4n7rLO0rG7XCJ6QcNf3rzY4Jsb40cMC530+4sZc8Cnoa34Q3Z +3wRAcZy5pjqOp67siYT5n1/c0TraiT065pdjwAO1GRQ7lBq1R3pc8rRx56tuHI6S +vUFptTIBTiYTfGUTNNV+plbuHc/Ke3YNWWI6IGRl38CmacOi57YbWKQfodjABgQn +B/y7EIGQmmcLyotiAR+aPenJxcODWGk8pzyiPhOkdnkZahQibxRha9ozv7kbx9H2 +eJPzW42Klv2y1EHjl6goF6ZOb4J83WwejTeBH51PudryI5ALm4t52dhDLE0gjhvG +s9OtZyIu7LaJ33BJHrZGqZOhhugbVqep99kbK9lV9Yapq+f26nzuQB9PENtizZlB +jF1zJedzfWmCtzzD+WyNR0UFaivew9dqopZUtxTEdEmrxyBQjBq/58qS7TF1U0th +Eh6KGOdwDf48IEgfteQKqRodkUjxdk7P2iymYB3HuBnB64DrM2j0GrPw3fW4/eBn +5vA/1aUyAQg5x1eFb9XMbf9umaAyulCB diff --git a/scripts/profile-shr.encrypt b/scripts/profile-shr.encrypt index 8d3ee22b3..195e4f62c 100644 --- a/scripts/profile-shr.encrypt +++ b/scripts/profile-shr.encrypt @@ -1,197 
+1,197 @@ -U2FsdGVkX1+gpzxWmnpkysVLsD+byqA7x+5PBcBPWBXAs9WXD/cqZKOGfn6a13+i -hb28oQSKA2kzskuxLcHObk0m8xlaB50LeCULFkqdbNPV++DXAMbflYzTSJNk3oag -VbEyjpjpma/1vHQxE0ImMZFmFXkWJJhBsl+yXlE9TGJuFknYiVAA2yj546OMLceo -bsppfhoEE8QX4WnH/uWUZTRro69ew6jaEXvPiq2HoGtL3IbTbF6OKj8BCWiYz7vh -ymHBtAD9Mn61Y2a/vSYhmOwPL7ckoCSkZRgr92If7hs3i9poCHB2h0AeBnihdzMF -r6jG0L1W/mOTvF7ed2xeGHfs3iocFEzrISXndqo9bmbMjyqlKLSzgEvnT2Zy5OJw -CJ5ehc49bSkjXfanZZv0f38QSjiCE15HvEI0GnjKBHvjU9sP7Lz1ZmFars1ULikU -rOMxgObgS0arXdiWfvBE/Ybdtcrg3JhAy9skZmJ2GL4Qn2PqmtRmJGoIkZTW89Ld -NHNnnLLcyHiBvzC0ANOp3SGLqnJgf2K12mSsRtdoIDwJoS52ylr7ihL5LRCOiniS -ZGbCI92mjkpUFb95w57a6x3y8TlDy+HIu0J2hUuCGU2Lk9xG/xEn742vGvGzBJ2c -f5N1ttL98mfn1w8F6fzxz6V0Ddi5dsYeWwsltq60PVhYbsXxvHyxowgk1ppSo9Rk -KtqP2yU930QkO1Y9e+0ORW2mwuEHRi6/eKByY6vkw7S20n30Jh9VtC8IolXve5T6 -m74SOg1IgFspVcx5z90kq4bgVr55TjwBTI0uCcADaCSVSsU3fvOhutxoBRl3scmZ -/5k70UYPkz8TSoC7IYDF2fXbL+wst7sOHOTRaFGWKREUiF9+j6fb3zE0B0JJKLeo -pngQVqsTrcSL5+le6fG2rPIanZgJjOk63Ty1X/Q1pJuotdIr4+dl94cFBHNEaBNf -3STVzuZGGTcsDMshIHDv+t4D1jjRngqfirgnQyIt7lWP02uT37hHEpt23k5hDx6f -Aumt3sdgWWVuVIZZRIxbjiZ0USaEzF0+goxWBPbGyj+a5IENAnig8x8oNFiZ9x3d -CbRekDkP3lBrirOT+YHFypPa3vEnVsaEls1SWhjxWV3tseM0l6kf7kXfByLLfkiH -ARLdk6o/zg7ECrzYepXez2l37YkFp9bEFRX4yDD2yrE0wz0RchJQau2SL7SaDGau -IBkzYcuSYgvlPvN5Ah96HyYpyBbAY3yHvwEuNcZAF3N7xZBk1EY5ZHjPdzWHz4Je -8Y9F7lr07TzrWuivZEJifQHEEHgjsBC7YMtbqnC/hOeYkU/PpmieqfDE/PLg18pO -7OAh3C8mS2qK6IdR0kmdMz/V+nm3C6D4gZoLpQtwWCheCemVIzdsqpRbqz/nUYd3 -P7x+gy6FUYxav/J6C5ynjr3vhW5d+qbySdOGQSg24R0A546xu+kj/YSYYJPDH+s0 -0pkLpyDOBU1seWuxwzBXVqlhNB+ROZCSiGX+0rdqME3lOVbC2Qv6hg993v/0iNrc -PmCNudPVPU+8AhDFnYbL4io6ivjJsyvc1tU2H/82grN9EDHVuQR8Xfo/k/csxWbj -WhDqdiktfdjLO1p2inUwUeFk4OhpsVP9ahl+9LnhFA0i8Egn17Z7cNHFbRInZeFj -9xMQMnXYfXHx5ZxjFlRZBOdpByxDUHCBDrf67g/14TlO8RAednIrtLandH1s1DvR -ZamBL915dPLsBHQk7IM/LoSw1H0Xbt9O5MazgZzLiL5JA6rOtHvGauM2vztsjDdx -iritlLdELXgVN9+ivJQy8h+LfuFduH4ALGG8BTVx+5eKRewsX85kv2Rjuz61oaD6 -K6O5nW0UnZUOOaayjyBQE4SNsU+WLfFAVZLbRqjEE7txBCvPVDYfWE75WhDmnKGy 
-HsrV7be5C8uzyHf2aK5W3gNHVgzzKMwNDstGSKN91LlN2kw7Vp9IG9J77p8JK55+ -y7n7ltooIECIqzb7sfSpB3bhzjkejuJ0f8fbHT0WiTAJcDLmnlSUAhkYWVW6ioAY -opHE+3fgajlGkG0STxA/kwW38RF2jPp7jVsGo1iySy63SXgi4m6aYPChejAAYMXT -L1eDkUElDrXWiLnXNjbfIzlrkoyvx1OQGDeST0dBVSJhIPyqoZqZV+WCwFQPaFL0 -VxfaeOdq+qcDVZhYgyal/hc0cwGswrXC880XZlxARR8IbJE8U4N0XfksFzjMFwtU -oq3hAbDt4qh9RRpt74GrIduGBEtn0YEBJUZCLEv+Rjq/dtBqDI0albsWVMt8lfSO -2cBrFLleQJsMo48tu/7aDi/fzWtKJhWY5c16SzTtrsr3/UQY1+IQ3wUui2rntGIE -aTMU8Pgkcmlk0Kt01KCdnzgDUACcDyuZ/xh7KwYzQ7R0rUMjizohoqOeUxX9tVGR -4yztgyovyl9zQfN96BMT//OGoXLkd5ZT/YJ16MLEy1miq8r0W/PPh4RmAyW5aLHO -xcsPaLIO+6P7m+BTnl5swTYbGDzTNAoSkawJRvi0kJYJOga32vIwvGgUnerfQMjK -Nq5afSmI8MrKW99SWlhAbkQlCC1OohUJWs82IULNwtlhLdGM0U2LsjZNQb0w1l1s -lOluXeU6ampj08bFE3c/3aTojVeTym4ALsm8aec7ezHDEfESIAb8fqq6RFQmjeim -JCH6wsDkkjmuX0/5RA4Ke1mWAdfULbzfMZAAV9k0N7YP5TQ94lSeTJ322qLXD5Z9 -93J2Qg6u8q5X8hBX2vnrJt1dMcwPwgRxDQBSUx5ruBrip8CPEzu29k0D36yufdoP -PA9BlSwj0kluxzBFpQ0iLb18qE48gCUCypa4jw6tdpAb9eXR+DcTR3oMEhVQoCld -uRrK05ehQjpjk/oQBNohHheB3zQhinmN026evmlotHufcFvTf7tQbe/8LJJRLWd+ -kphZfBsD4WFbfBu23bZBIZoYnmfGFcEGWjOF74OYhvzlU8It6AMDyuzkMwuk70kd -J1Oj84Y7ei2iq6wxiQlfMYSaEwqRRK/dGvpyYjZ2jqayBwhcRkFAkZ1B6AEAOMMp -moGDzGV7NJqr2j+phwygXXqMs7/ehBoxSGw21AWolNxWTJQEOws0Ld/ZZXVBsDw0 -j37v3xyzecO9UG2vWGACcedqSwEUD1IeOF0gRYuq3F0ddj7RBy88s2tqw07pbEXt -6JceZl4WFW6hk423UC1CTZ8vhqhN4565CHc3W+w32rOgzi73bzf7j2OKNKV0R7Vz -2aXrDZAwH3RmDacxwOnQ3aovpNiUPs459bCB7DieJg0rS1q6PnscJNKxUHA9Vmgy -XRwAr4ShtZy0F2TPwj0Yrku19/BnkFRJwJxEZEX0JNTgkIhOeb7Rp9VDOg+S33/9 -o35A2gTaSooNedI0UU5V9vvTaUvkRZYQR8RkBDEqoTOGWYdek/XBgpf3nDN+f2Pc -Vo4eiGisrXjNkbFB/QLWEV7iukFtDNUxhVpdJ0Q1n0mdWtyg1N2Isc8WyIlXutnG -q9eH1PbeYQ0NKcqRMBGOY+XMVaHShXMDeQFYerVhRNBZujR+cLHKvQKS+Y76Cv4N -61jyJbmf9Sih/IEdBoRQn6hAhkYWSRmyMSvKX3L0fUdAAYMoIoHY24fS24MJ84gC -NoG7vmN0gbtC8iGSFX3o6kIdyE2/gnf3iK1LqQTNLDwwS9x35PosbIGRe1moIDyI -y7FGaTOFXKCUnQ5dcOPKLnHyUDGwy4JUKfOfKxG9LZUhoYWLQr8YrXEDzMjulBjJ -+cUq262+b7o+TtBL1o3hnTaB5Nt4bzk4FGrzSpeEghWYMTuotXDjwpXyBP4dYhnH 
-3srRrO9tZUn6F/mO13vGTrtAvwdtWommcPPPWAwK7BQfe2Ux2vpB+OhFOiL1PTA0 -KKu/a4bLBoXP5hrFi3vCVesbPcK/IIPPwwbH7XXwreyV8mpCy+C9/s5SSN5hV+7F -u26lZGUe/oXprXufDQ4dAIY+D4ubbTiKHsjRMYZx3AVWW6fkqsKR4BR9tkko93+O -W7otfpJqpKYz+MtrIqSpkng7FyTWc4Lk1CDwLwa2GYP19xd4bxP0T0qt66Gz98C4 -N/lnHtzZ+7yN1IMJV41P4mntNrtqvXpugA9b4r+A1woFGBiLNJDLnWI4A3VnskRT -d9IXq8e4ODWo2mg6WeinVi446WfZxy4a3w+v5AEcOrIE8BkP4ynxK1wzcEZkGUO8 -9TT9EY7llqNLoI+Fi50MokYcPOVTUZ4HLLQwUuKT1KqGFqgbYBpK+sZ5xS4NdHRF -lIphD4fGTiPTjF6ZWbbTCnWwVdr77f+tOlHPJjxFrJtWOthjkWH7lHbPIHIB1qh+ -0hbamNT2VMTocdDr37v8NeLjKGkaRq7gsAacv6OcTPagfSpOmvj7+pebu0/M0sHj -Y7/sqGEonjHXoLh5EOu21qWpTlo15gBomXhw163HHN3AmwiK4AI1WhYJdsZ1vQky -IpgvHb01qapVm7TSmnv7Ja5yC3JRo3+hoI5pI8CUefBWXkoWXD95DUXomZ9EakdU -gBoWqbTMFZR7MZTQyVOoP8yFIzqs3wv5JRgAy9hpRDIso5vj/GLkJGa95ETbTU/t -qOnFe3uGf5+jbq8+8lNffUAJX6FRCMyQTS9uaSot/P+Z7zLHBkCOeALacT9XwAuY -rCHJtajij/Y/mF7OZBlRrqhT08+1yUn+MH4oi9S0oHB+603tRy7dIk1xFZ8ohujb -soqGjZ+lx7nIPjJpe5RkZ3FlmSYCEWuWjNVzWWkkJmxuOP+NZaVjnaBrqJlwUy8M -6Q72Ixhs9CaP1S6ICC8xL8a6ky0+5QrE6eie4nX7mqxjXn6KatjVpB4TJTCPiEpe -k6nkfSfMmgWtd7YEb11nlrlnQGTtfy90ZeSChRLXE65zDrfRu0YPkg4ytvoTISJ8 -HOhEALkN83rXQaBGDGsFthT1i5bOZIZeC/Xu3jtANMjQBMqnVTp+7hBAevvCJDsq -kOd0LAsGYjgIM7jiQx8vEynu2l4ORx4s2t6PWjbc8N0TZkwtKp4aaEt1hxbau9IQ -xgtdICFeoS5og2nV1cmq5Um9eVH6yFr0QVXnRYXtYWw1nanWXcJw8ZknaoFIXo4i -kfJLxOLs4z7gekec91sY6MQbO1wCbp5qAYw2Y12GmJZX1eBkvIbvGoIEkwbdASe7 -00mvbqQT/iYMuaAeMCdLcdqTHEuITF/Rxi5/QRkvXCC31jqOtmnOGwOZ93yrngKi -y6LlZ1t/QPNhs8+wIrMGbeynL4qMTBhxMK6jPWQ4iSAKOLIv9kOUL7sKkbBXN+pp -sWfFnWIx1EkoM3CkBNK46XhHyNZ46zjNcuyJ02Utb3Ls96cnnt9onRuzwRfp+dsh -2PPEk3hcCZ1Kfxl/LBIIaJFLYXmvNK4MHjzFLrYYtu7zAMlBNXJZ0RQfVo0Qsgw1 -vnlB1z0Lqd3Gulq5iPEUlOl4Ii+/vjXrP2WhZmjOiplwmh4cFNsgrwxlWi0K3eI/ -cf8rBGoe3BzRWvpEx5cthEeNJ+YkwiprbriyvBiIiw0UhdrC/BqTXJmy2fQIdMHB -4jiayeB977k/GdnF0HWRszVD9zEHUL47Kgmm4QAzsAK0c864dW0mepf51GaZP9+i -cz8VL21TNLQ2zto52eyrNKrG6sd6XEsybcd056xf98LqoNxxUywqPY6ENgMK1VaO -2Ce83JBpZUAKjjTNz3PZr2lcL/epnzzK+UghNCQFoAbffkYjsnUng5yj5CGwg4ST 
-3XpXvUio/O9Z2oV8hhZY8sQ5VGiwMkRfAufXKqwY8SGWYQiurUWBsk8VNmvs3JC/ -7o1Lr/6ZEiJxdzSUV0+o/rmS0DpaT9oMLYQ/dm82gPpCsVIvHcqqQfmqIIKBrZA0 -otpXUIqfyGXBB+yU1SPKPwGq/Proif6GQq9UROgN4Y2trIkHvXAu5mZ2Rf09tOGg -DoSj/GLhligvN3eh3lewEO8qQJilc66pCSR4OcwJAOrNnkg5fMQ2/9G/6zYxwX8q -Wo4FJKjTN3z9iWKThnklmgbhHI/Efcg5cFBinoo2EH8gRTda9YvN7MST7dXaU7zO -uQP/eYlKIBdyS18d0mNsssBQpMFs8c0W2YDinzXTnXmXMgpGOiBiH2877I6VTo7l -Mx0SiQ02tsa/rjknBPRTKAwMrktPEoF8iF3dNnzk1Qt2OKYawY7zAECwRnnZhBlh -z8dW67jv4DQmeB8OZLesaWmAQM7/gmw5ux+W8sf0LAonhE4dYSbhLjsehEL6zHW1 -1UazjQK1BycAmxsB09kDP/G4s3v7wrlYT0Z00yzn6nl8OBdSUctjYXESZAcTYMzu -xH4vAUOg01YSLCmMNT1gE8GquzSLHMJHw1xZQdXtucEIm6whVB+35I3nTiyfyL5H -OaCt/zb1TjfZkLNhSS2XOE9C+Tr1NX9obgGH2syljTL+dFs/CkIfy7iyhDL9Tlk9 -evlyvCbvJpFlOCcpS4BKNaLICJucTnHy/eZN83ooLRdqKCZngZUtdXn0QY8qGuJm -tW+AKV95j882dZFEz0PR5lWfpj9FMp3ESmR5asb6ciMZmvBpfEIivc/n5YCZxvDp -W+CmjY8JZfFKmlO+E09YjFK3xSHvwtJx6ZY8Bfrj7LBflwHIOyrVIEPqiOeWxrVu -VTiioMKI1bwcAFg1Qt5BL4Aju1r7cCKHUZZKsoocHfZKkOXMrGmlwFLYlsNoyfD6 -mbtYkwYmQkBkVInIMHUeGsFvJXxwSremq8ZF5fopJofTGR1kUqBWm9eianTYo/vp -cSbykpkD/LYONnIAXMH5kOtA+sRI9EbiMbjgR+NnTUEZgG9fyJAbEA45t0d1lchr -s3+jT+wwRm1PnIDW6cleD2h3W21A620aXL1GbTgnBf42XR3E5GwBEpYPF8ZFjIcO -jbiMrOQ82QcfjDbuxb1j9Bw5yJ95Bc6GfVcfS1LYm6N6s6l2Rt+37uF4j39WO78T -5dcvI5eXA1kuenx25hZMYZ7SLOAWUCJIRSLBtgLZx1B+hm4K/ezmrdbcMQsXbwjP -5sfoWZ45uz5b6I8eYr7HK/M6uiTNJRhPKJzsGlboB4PyMZ//XsW70JZNCYc7By6v -Gtu+HmIbGVVY/jwG/4nOF1nXSpnJZOJHXEgc7cc4hD3y0579maQsYF3ET2m6+iGQ -Pc76eMu/GF5RnK9XGvE07Vej7tAw71sN64bkn8ylVAUhXtHb+Wf7hEh5QDTCjAl5 -pk/OEoRrHikD9IvmYtMFC6oYnpMSPlaIJGC40dlaTjpsfVwQE1NYpM1piIBViKpp -OQRzC5sQx59F+oPtsVMUNyu+mXMTigsWMzmgDWs31uhpL6W09KpdIG+rhlRDnit/ -RU1nUqvTgj1Ix1KRQYGKTQcROFE0+kMx1sFHr/4vvdHK0HyQNlEXEvN4xqAGc09F -X8PTfJGVhuS1csWJqNWEUjJ8DPhIt9IKupX9VbJSxNMSmeYWUBa8jD+P5aQpHrn8 -7AUNZCIDB0VHlKrLuoKCQXSmsZn7VExcZenko3EsvdmEmB14jBEtiGgDaDDAqFR0 -RMh7kqaGwBu70sGqo+0T2wi8DVtb0CXHqj3ja6Ifu9ycrLVYF3L1z/i5wqVYV+Wm -mztDBaPDFHtaeYFsXxyV4FO0ygiwdpY8nKhrcuE6jrGObgtX0+O0ccKCpINRr3Qj 
-1Q6lGymxQjJVgFourNcopk7Dveq3RVzv6Gmeyz+pza5fy0OWDeX9I3Azi51omod5 -/0qj7PdQtwAwksc3j+bFfNBDtU+YBmGXK/ufAZRwgBLqJKRiHpqk7mtJ/wbls90J -5NTsyTsgIuvSl6CdgGS5+Bxpq0UY2H8gpHSOLeO+tnd+bDLEUlRT7uUhNO8vm+l+ -Db13aStdAa4hFrjA5gmWb5PWyJSLRncZRPs+NAXGSUZ3Iu8GS+fGiwDGtZdui98T -+GAEqd0kPWkqCwjsdlIIXS8m8C7iNB/WGPcx1zrhQdQAEucMM+ZREn6YPwxcdqwD -DIiOJwnv3+iW7DOa5xPVwWNLIg/XAMiG/m6+psStSjuDRWHp78qU/LYA4JuLjnZx -XUGlb4/YCGQ1FeDNtkAlu0Ltlw2qj7NH2j4DBMjcTJkSrf9fyyF68nUnjIGc1HDc -2gzzAru05YD7/3lUrClaNb9VMK16sIpQHYtvRnFvJimZKBIUuo8M5lxmM7S6jnwr -ueIKKxOLlSA7cHx8aM/Ct+6sRu3K+5e6J/5TGDpab+bzPfBERGb1IohRsTZDgO6S -uflOEfq72+y0pxjAUAVyAQVw5zZEDm83OhFYoCnwr3qi3kDPL4+dqUW3/zaSXfGy -+KZAvteWEhsoysiWze6hn8WLkuH5lIfeUm2nY1rJASTb6Ob/tFBPXOFo0IxI8gMu -GRH6D6d3Ff4uWky64LX6FSXS1q2aaG9xRvn6OEUkKUQkeQNuuLcfcwGj7CJurSui -BhKaxFt+xv1TJK43G28mPAouFak4DryeTnrzJIEUqBYamNbzPcv1jpi9qJgkHaI6 -XfDbtSxGZR8EXen1cf1eW4gxFs63uDT4VZ/UbNM7TuqbEz5St4ti6ztY/vbiTE0J -l6Js6IGeeKQXVDin1EkVP5snxkxUO6OfAOT+K2r5aEUPVrY+GgCftkghO6fnh18Z -sb9/IwD398fbnoffswB9inAggImy9DUFEcNwZrpre1ehbH8JNQkeuIN3ssknMu+k -h40PcURHBIyIq/pbznaZtWgT/rPTa77+Y0C9nIqyBjZHuuZ7p9XScl6RvvpW2rRS -MQATFuI2uPyF86A+Mir0qf+ukxMj5229vZBi7BB9MHyHjB+9C2GwulyD8tCiKY4X -NLTPjJgI2MTc0rnbyfzT4sJNVe3LZj7mTgz5MEFW5pIo1h4Vh86HLT7rS6kpvcuX -EpK/Nygsq3KaouzizP1xtPl7hIizFZnyaSuR8Z6hS1GRhytlSJnO7NdA+tHmjpxl -mvyjyQruQZatqMl8dov4KhcBbmmdA2twr3nqCo4/J7evu4hou46USYZyrlwdECY7 -nIrLC+23FnVrKCLLPwRccRAdyLuG2qNonGCu+LtD+OvhXj9HfmC4h918yzxnWYwn -UFWTZ0DgeNb+vL1xHyzvB/ii5qpqKfFeeHSMSIr9+y0JS1s4DOCpKdxsYoukVlt/ -Qz5aZX1+zb6vP6nLX9LOLn3ePtOz6So2FcWumraqyg9K55dxwONdjqwvTvhCSlbE -BH5XjmthhrxJP8b6XAzDUaQGsBovVIP+fBLhjw16pt9zYd6AzpJV64cDW2rniG36 -AveUDPvH2DVFkDwXhOEDC7tZLm9mNfkCRhYrZNSYSpXh7MvpTsR/N672xlSzMqaT -XbHG6Rg8BF1s3YZo+vLevfsaQSSoJyy1m8fSGvWxA850czks85/h9jgpOlo7HcbL -PtYyX+BwAHTQ/seXT7edUDBES+NvzoBgkV34Us1MO2p6AmBqgujLl6cB0weyuE9y -xeyRjLiSWoZ+1dSbwbN5fu6tMrtiHXApC3RMP3EnoNq2dL2OOvF4LaCPb4DEloKn -hIZAWA6DFDHzI9vuHSdTmJMz7mBdaWS8Vp8taPgxXF3+c03/jMnYvIRs2DEyysvk 
-vzmuj2oWPCc9lww048APplUUPyWs7llhdQwkJQ9N0PKUTRhFqT/v1+efMOzYqBOY -XsM7pqLietEoGDN0TT9U09BZYQp8HDBGiddtTwUPXISTwTpWRYneZnql2mEGKyYX -0ZbyCHlNxvA1tkzdi3KQDlVffHOQb3KolhYvUQxx5H+erTlPntPQ/EWljxpxpubG -wAhwzzeZcKbkbOfhmM3dubjvDsmM3Uwna2uwU+kZTV4Gz10dOdPQYvcFVyTVrHAw -W09ULjNUgeGxOFBEYejqsez9l3dwFDL5L/FJ0dshU3QKV3qjDkyH679Gj2wZ5U+h -MhtgUF7P8qSkJhExNjSAEk2TRLZDr3aTp08gBAbcS9oPHnxxk9bFqs99HHXq3+fM -DS/xTf892IBsLpU/2n76hm2YtjQHh+ZKpap4hQFFLqTFxboKiEgyQVSy+RB8aPwc -nUmbV01EKE+vOnjx7hRWk5SuSvRVq6UG2bRzqzHuj8Uo8diuWaBxuUEE9ez2GBUR -2NcgD+hEn3VeZPRmEO3IiLGnHVhqyAXm5EEAe76PuzMW0Z+h0syJv4oVnMFdTNTK -0H1Ch9pYxuWUNF89/lbpVcVyh+w2damDLkWdUhm22Q37oes4pVYp4A7recogEiqc -mDLjOW0LlbSPZWPmi0GlKpRWde5qP7uzQY85ZenqemgFK2eVLBfNjh9vT4paPC/+ -t2QgZHqlqEHoGmiELPJdOkJpT7Prsz1IkDFqBZ4dUdnWZ+xKSDX+/s1ZZDUeXsC0 -kNI22ZAVJQApiBbPM9cVtk644nKuz3GT9uqx/zoIA0BK9aKu2LdV/ke348xsAYM0 -DPenNqS5xCrMbi6NJOyXIc7v/Ch3J/ZAyo8Qq49m2GLlTW0pX5e3ZeHmGzBNgMpD -T6fldc19wGEOYQws8GU2dw9g6iUC7lwfv254LGeaLuQ6SrTpoXNgICY/ZaJCeZ1E -TKjR4oCzPogIqH1LgJ9RyGxKQ6+jNUOGWf2+JcDjCpGF7Ndr5aA6NiBNuwqsDLyc -tER6AZuFI7HkbxCZtKpiGtv8LL5eNSelH1sUol9jBSSqTtiM0rcdz/ZseIdTrNer -ii0KwQ7ZODbfeEuXk0l+3FiCv/ijYFy94XA2q3I54TOEdAI4au47/koJn8Dm6l7z -7VwxhIEXBrRpnHzwB/7m6lh5l/WctO0sjhKCDZa50ro6jvtmn+MUvcv57nmE9oWw -snlKO60Jgqm4kqnxQ07rUB088ig0sywdliHy2n4p2Dbpq7QaBDA0n4HPTIr5Fsxh -Om3BDMGFnrj9yRbHV/UP2cTUOoqrxhwBNZigpkl757LGUF16WIj4FDYzIOPElupr -I3ae3uu+bv2Qks4JSvpGyiBluXxfnStmGJZVjScL0K5bvANqVzcGTZX11wzY1ZPE -3pZgWWsR19lmKGZmx5cDv3UfovSwVp4Lo68lmJFCxttqaU2v3qCiiyxjvjzBIH7M -zbF1foDmll99LnZQa46KxFlAvv/hzaZeqhsLIETUGxoHRWMEG3l/qHlBCSrNJq3Y -KXRhYlf6vQYIrx1//FPeCuGce4nyHnmt+lpRMQWut9EAWDjPRJDbYb5u6iKdCdXk -cgYUtHMN/ltJ+tFidiok0FTTvFuy3dZxDySMIEOhcQdFqkuzf9nTn/pEIXg5OjmA -a8Y603QM2EiIjoXH6O2bXJXtTu7DUVeezpX4wfXz7l7j3qSLFvbI6WNfIwg6B9p0 -LJlawxhlEZbbqZJMO+8OmwP7+sHFGIv47oBEUNlHszLFN4z2r/PBeoprGIo8cpsf -pYDf38JeYo0hh2uoWOR2+19KuFNLRkOgD2nMlQUJKsHvHm4UH4IXFjZccHZVJZ/K -tKdhAeJ9CFLTfAIIn4jzAvrkMdV6tbFk+e1hY4MKribfTp/uoiAuukb9C75xL5uB 
-qmeCOLge1s7sEGWlgx/eGtiXRsja7cyehyFyBajrtUN8crRjN4D8FMH0meQR7Xgw -rKBEWNlsc/KJBBnJHKnzLeNDkTW/XwKDX7RWkLmTbgwJtVaFZZGxVYif4LZzLzxf -fsJTza8ZugP07TFnivnsjUfwfpc5Cnv4atcs/uoc/3oW2Z/icAUHq6tfY1I/DwGW -QgWrqZq5WD79fSjjgSQsLyEgDIo12o8jlISxlcAl4x2v34spN7ZNhyLiHFZT+iNX -GzkdXRmCQTW5wUFbdaZW5ZdCfGqsJSkp6OhHd9Sk5gXfBNk++nK5VhQqCeijFmi6 -P8HeZO2F+mm1ZBS5yFXLfE3a0Tgut+fey5PIbH9Hja3Fi5sdqephsDtNn5GOYIqI -0MAnpTxST1X+tMuS63sRZ7WqsUvaCSNL2XKLDD7yMNZ3O2UZ+AJg1c4gk8X4XcaC -12w0eQbn7EUJkasi8DcPnhmp65+rLUOVtm/vhzVTfkS/oZHJIptfECPEdVy+0VYL -C0getEsr+wKvKFLTZzBuCXOCZvUSSIqTnWKzFCaB9vHTJr7Q4d1HOQQY4xMHEbxI -h2ibA+v3Y63Rpp93w/uYtJAfecbD1EgbFfvjDySVorJqag7AFe7/gYp+xWL7glNo -GI6YR229uticisXy8JmsKlXWzvegyllYOjVJ6JkIyciOKfyr0TNO/X/Y4qa6Hb8B -SbfndxuvvNbpwQ5Lgo2K5bPFTQwyIglIe8CFoFVzzedHTI6RlaZJsy7c/0Nzk3B0 -65/8k4eThGELoeLS/mIwWBT9EY1Je6grVx5xinJFOKhBQbPlWw/aCpyQCzhR2FXB -Dr/5tlM7jrUGjoHrulyUTNVvmPqOf75dsMQwWcX3oaWkounXhZ3PRAd0l2DMNoAY -+7mw7IqYT0ccaoz7+e9YiGDRpNfhfyERHm3JuvDkZedeLtW3HFybTX+1ll1AGjy5 -oyfr0ANI3yQ8KxcvBjG96zB7x9IP3iGN+KpP2kFaEQztaXbynZjtYoN7R47Hb5Ee -525wPDIalX+d+IK3t80fi7lDPBnYAFslt1GhsCsEody0vCkxvuvC6Q== +U2FsdGVkX1+Ro2OkwERnqQxRgqE22raIG0nuZNU+s4yMx5bMTOFAwynkvI/D4vy2 +rb+wSWs/HErbUngILXkAWf8Slq8hjumbVIBoST1PyBWIaw61cIBHVRZymyfwv62o +NPNkcegh24mT16s8zGjHItZ65hxbgfWo7mKZ0hO8sC/fCzeTPQEvc0fuz9KacTez +sqtaz/ZgTpUG+Oq+a+rhhabVfBwpZQ/U0Rm/U4MnY3Ixhzeo6xWIETMWO8oSxc9N +GUVcNk5soo6rwOvuL/s2tHm8rIyHsxSxDtoCfLy5euIwEOriPgvjJ9fLV1KVqmQR +WHAxVXuBKOu+vA2mojPuPYkf9ltYW3yLDkZJ41YSl9KznjlEEYhFgQQK3Rkiqq9h +C1MjNwP2DalMLE628BXT8AUc6HODuHUuSq/fB9UiWqrsHal16quyOTekb+Q96J8j +V1JzfNGS90qhQTWUlAQ36NdD+60bchtf3+A8enaMqCequyaY3PLDDi+OYFgVRwcM +pXfcLfDeqmb3/Q1Jd8RBoz7kZZ19os0ad07HOJEPQHkFSNUfSEGwQ1vbk7R77zj5 +u/gyK3/RIYLSIMpgbdweAx78tYmnABXJ/8yBace/MpyHmUo2jxL+hweLH/aT6xzD +MyISNtK/Xw2aButSh3RsVegNDz0gkid/c4dVb2lhZ+i/rDaY2kZ+8vYr4A1nEgXP +n8A4IhIx0u3ssEOnLoTvBMBurxi0aiqyknKIm8GDveA9l9d7J8x41s/SXWM9mI9j +2/rcXbTQRn8nCmFJazdhsnY23b1O1cdHhw2MMkzQ76yE1iTMHLHS+nDJiR9tf1Sd 
+da1MXgdOPV+rtCvMJqFPizBR+lRWHEj0w3zJW0dPIbZXySUgZRuFa1L6sQ6DmHDf +Noo46fdezm+usBoJHwvc/K23yhYDrYAPPlsk5eK9mQEvlgFLnPZtduKMwys9NXUt +a10rlLQvMC2NeP93dZCabYmCYnFdIAl/kTqpjs+uC8kFDsdY5W5sS3Z863DBX/pa +mB4aR0B97mfWaGHRqxXYwqehvYmy6Dhq11aO4syKOERqrzQ+xw9NrOJ66SmxZdMJ +ckPx6yoIRew/3fdkGIdgtr7Jd0vDkoKSouXAOR1sdq42ZrLse/Ee7tSiSuy6kwvX +BBzPxKoNyKaNXTg28W0N61aJGn3BrEG6Eutx37VgC7wSk4awjWhxS0ld8DjIbxFk +hQK3+BJqW5BWQQh4FkHfvsWgKSDqzLWbXeMc0WacQJ5tSaMJs+Wc6GTbhs4zcEdm +XyBYsl4eagtr7KMNtLJGzFd06QQzb7T/wbE4Z7V4A35LyGcz/KAr8jPaFT+Z5bp3 +FgukvBkjCaN+BHOakmXPph7zeF93akIySrNm/igFauuhQ2DgqMTtMOCg2/XewoKO +ZbOWcxDUPy2yqlSg0srKYHX+8IKLX96zmmryQA1PpLYJmVZQ/t2muyCoMV137DrD +F9gy8Vul+pxkmXQ9ezWeeaBLjGU+7JFCOMJBkFGbUt4Qsjm539FrGZ3Tne3YZ2cu +PlFJlpYjZAsBnhIKXY7AX9vkT5L2rjuh4VZrLNbcuNDxM2OnH414r5uWoyieFFG8 +W5xhHJ6i6icYjab9PeWkl08hLLEL160ib1oPJFdNiFVL9H1OBhpO5krYLMG3/N/Y +B0u+u3ujhxP5xZkCI0mvMZWO4zDlYUp8w7ci+iD9NzOK4DUJgXmxTswtLEa49mOw +hVD9ZuZByfwZChPCJ+7i27CVrZWir0O9ffjOD4AZ/bpNQ6/TAyS6W+W5fmiT+Rfa +49YbVmuPFS+uEzeDJ/rfpbO32bJ9fGgLICyp90dCss8lBoS6BHGurkf7XimmSkT1 +3onAZ+rs6T+pYrcAqBhscjNQpxjX2Tyc+6M/lxqsNFfG3Dd2SE1W9Box/lo3NS9/ +EOvNOevjjpBlK95tv0EiShPrBROcBvNxbxlteCtfVsMXldM8Oz12MtOZiT3nQPSV +TVbH7v/QNN4E/2Szb4sAO86Rwv6fGXTKsmOM6k1yiqi0y6j9WivwuKn1IiTpAajW +Cf8uQrXFJq0nzklq0b+W9xGHlCQJIHGCUFRHFqLIf1WHfi/jqYsjyUsAGMtcmNoN +1Ggzh2tX8vEWH4Qy0g+bEixlfn0eP22UWOyxmeWpOkDtQuBFBau0Dtns6I917X4Z +INRls60WtPoxWwUJqWfw0PnvkZef6e7qzzT66QSmuyMtIseTq3DYHVd46Rgq9c0d +ZjO6+irreIfmzoZuzHJnNuzj1mZeladcbzimGjrB9mJggspNBrM50q7YcuMv+y1o +mQuAIPb6L5u401Ti3BLMC/+fsgj1gT90r+cWwckXmtZZVaYB6xP8K3qlAzcf1lP6 +TXmHRC3wywS3aDFLHU446KU0NwjQ98Jnc/f1ZNYNTtnlx0Cn8JMihXVFTDn71fJl +papY+tk1LGp+ViZzkLoQ7jrjrOmkQ/uKnTxqITpBRlZrv0W1upz5nqdET4bd4D8n ++5r5ctsMCY/0Aa8VksgRxsE1fp14XItlH94aI0UFv+eZ1gJwsal5qyTP6phtP2FB +i2mZP9tSjggkB15YRzlQ5q9NHG8uBktTVVC9E4blDTVaYXfxe7RiQ4eFNIKGbkET +4a07zb+wMIOXSxhlj0+qK6LJubZsDYQYSSjaaNL4hN4dn5Zfh20WtuF1lD6+Ujcb +Rqqa3CEVe763CSolSijrmHOd/GbfKQneeo1nYD8TvwSiGiI7iIcF1Y6LcMBS1fIx 
+FIfiwCjbwlS5C6lfW4ZsqMutbH33WjLlgsibrPG5t9kIRm+AeGrdpu79Dr4VYBjc +FPZ11LbN/aKaokjCXomK9grUpIgT4Xxra4yLVprUjgzfhDBdWVO3011SO77OgCtQ +xJYC3V+nmh26VoAPHUEw1Ep+Nr3PA59TdI87tNqDq1fG17Nc9cOOeUbgu/84oP2J +KEGfZnNsryTF5rhIagFqUICWvJLn+IuOF8HQbtYnvQxoJLy30YoFij3QX6K3f/42 +BokjnHXI5RELnVWL/Bqfh90x+cqIaRKUWzrK0P9dirTDdpiiI3wIXcUhysjvoBTw +Gd2OQTAWyQH9K5bkiaeloJaemPWcnbB5ELJTNnAtu1uNIP/khHI+rGO9NNfNTMJZ +MC3R7B/VCa5MhTt24l3dHeiEKbBjG5Va8D9zEVj17DQJWS8/TuqeV5CTCoBM+Z0n +pae0ldCJEG8yuoLEmy3qcTZC97RwSqHoJ04HxFIzAKon79o6TaNKCeuSlWP14X6/ +uuNvFvEj5MCkJKH+P0lZuAtcheZQqA4V85tWg/a2K912CtAdmaCA+kz8DzV5kMNT +CNZT2mnGP8C4Zzi2ntX/+NuitZIeVZYdxyLUj0dIv/5HL8lRxjRonI37Bvs3zu9o +edN85602I9fkI2mYCgI2IeFRb5zulrDIurrlrupl4r81h1M3uaFfFWtZOqtWr5qp +Sr61dCr7Mm4+/o/ex20tl7YFT3UcdvLZYV2oX3UVIqnhCQfEDDFmQ1kX+0sime1w +9yXWVlMhERrlA0VxABj540SVtA/gpGIP6/Vk6qcyW1k/QlKldtqHpMDpnX6JF11j +EbXFf49NExU7COmWb3TJZqTa+P8mYaMpVI6pe331gyIcuFuVKB46o+LtbVYhwfpA +g8O1xb1qsFZI8D+6sccq+4C62EkFDxWyyXf8BpmgWR1+asiJOquNlZS+//IXyCx8 +xLcePsMS6CfXY9uZcc+JYDdKDDNDYokaXt8rJeo4AFsgyzp2yZ8KiVbian7k7uc2 +5JpxMSB9wop1ZR33TT4T19cuHeedvnSFvJugu13TT52qoE3Ho2IOQ9kGUpjSBXXP +K38dQvKT+NlTmhCspbrzdfvAQEQDijPhITXR5GvqmxkwZgDGgygvjDxRMiwdRQXw +ZL3ifs6XE71pmZmK6MpmT1Bec75mVoiX2bNqBZccWJC98jMUCMwSdA0RAr6PCk0E +VB2bRxes0dnuztnV4zuJFGOG/oH6r7QMwKp61KGwIeZhwvjeLIvYWWzO/oM4a5Nu +cm1fKzp4c2J+FU+ogJWSF5Ek76oYdo0E6RSB42LBAPxa/HjIBDPAR9nqJ9j5mhvT +4ZYb/1PuoBc4uwCG3DqbxZTjwT6TvIdLBkHMGkYw27TtCO7KksepUKzez5jwOXZv +oQCXjop3J2LbQ4NkASStidjv+zCJIO9Cl/G1izcgAWSyijb1lEtGnD1lguR7rraP +co27WJZ7aI/OtSCGM0ezSfOodT6Am6Bg6AnzsZ7OCHadbsmp7GubZLCyE0dQf3BF +c8cHqk44h18oCT4ieiuYBQ5H1MQWDwgCAj4Ji5DoiPhg8FTr36PIeaIARl5L/cWa +lRbv5IW1+LRvnLamOBJ0EncQvdt9ohIigZHpVrSV+f10WulM9pFvRNOKADl0qKbB +ylln62Qg/gNSt+VQOBH5AwBDm/PTyKkSzn9N0Rs4mnSiRyjjOPi3sle/d6zJNnUf +RILRFHcfMGeKS7m9GTOVRCyhDTP5wnBVYr6YXn+o96Cx85Uakp08nqCpQqUerGss +XW8o0aaWSjv3M1HVf4ceMgtAqWDCcpUuCrO+l3USFztsy/Y+yoP/kN0a4UCFn0Lf +ccrvQdHxJo9qfqGFSa1W7dlDYJKGDq2oSyREa5J00lmkroexhJUPIeY8PTKE/SC8 
+jaaoFBG4PPiOVZhEgO6KtjiDqukrBoEgvu891wU3i1vhVxnFmnkpEKlZlnxnIHsJ +eHXSwhHwzXrBVp7osbgV9SkuIasxGL8WEgsjQviBuopvNXMUBjP711K4EJ/T8NZl +u7tAW8SuqTPdlk3XdyarE9gnZceVtyj8j3RmLV4tW8jfskYouXHDsiosURbT4TDV +xXenZXNvKLOL3SjYwZeae1kRACLq31dcIbdF/l/W84c1vapME6CEBIyhGdhQG5HS +ftnJqB+0bTBbguKCnnFuNv/thnXqF1SmyKJ4TXg5FSOumkIPmqhaQIueZMDz3/kX +X+IBh0T4x7C8TdbPOa3i0JI047jr6ML8yXUt4vG0aGPFAOeuyunvbiFU+ARVaTJ4 +W33TcIoTXW+nphl89cPcum4pdslO6qO+kcBrsR1hI/7aPia3NdujIctFunil4Ryz +xl8o2tJIpYSoehnJly2/ZCN0sk7cxmQVrY/KizsQX6cGHhFDLhRV4lvJpYoCfnxo +EP3rmLWQvjuzxr4GG/TG++8eo2WSZ/r+d/qbNvK1lRHqKhWlgHWbw9xlQc8wg4zd +K9pp2vIU9j2/cE3v/v1VjuTzomNGOl3wZa6soI1qi0q1kGqr7UOitqrfMccrZhdX +neogbgnIIYXjJQQHW5uvLJDTg3BhCmaJCNyi73+8QL3jASJMGpQIY8pEd91IuDVs +QiSU1sxV/BTdULUR3Zy4h/nwC+wHUxhZbunGCanPE+a8ZM+KM1j0xSR7qUafGwF8 +V7OeUhKowoNpJJMCt2r32cFJLLGqicjGr8Ir8U/8VR/g4XiNHOgUlIVca8OmME/T +6T0LJ86f78uQ/cApB4IoW0XOP6bZ60aRRuN/Laeu7dLglXqiP2PNyMq1kpKoKOPi +GZOnwlPer0uRVL2f9EpAH+L6qKM4/h9FD/6bs1xdADJR3PaDndm3SjwRJuzTL7Nx +vSKV6EuzHc5JDGil9QA9DtmzhxlIYoleIE+5VJsn01Sv9FAlFSPo3r5+GTtAoBrg +6xrg0irx7n4eTY52L6cK0Ml8HcS5ePCGNBbKF1w3bBpK8GoOrBevtW1mfTKgyaHU +7FLVf2z2ei5fFhlYdMyX1iUoOv97m6WBN9HMmYXmmKkFatN3xfMXFNYW24dNe8lT +oUojB+mJDrOFD6NJqR+LcHIRB25XTY4oEUC8navhEafcEQXaodEdfp4tjxBMVV/8 +UUuTjkIJQc9wB4ndIErP9jwGzfzX7NZaWfKYN5oLD0E52U16l9qlSKTJU0JP8XYD +nLxKZoLUP3KJE4a8mJOp9INOVWqdJNNfToD/gBMtcCg0HwFk5VIqgy2Q5QsaNE/k +vaCa+MORH+jPFJSRseKg7qvVGlieinsxT+ilNwtbmPjp+3uzoD0BIad4TJ1kKcab +5eXW2K7tb8IjRgq6l0A3GGlu3aIXE6IQiM5OdDZwiUPPp/X77mfK8KxcDkZTU+W/ +zwMwKisnVaG5jPC9TMAHnZapj9U8y5lsXaOgH7fY6Ov1VBwelyiL8qvVeNOo5HR6 +ag8l5qP7LOGRkoPESNftCfiJxBS17qN4nTCepp4Ku2w0VLrz9N9mEz2Ul2am4LoA +X5EjmqZnxwiLRtOKhEjnf382VXmb71QRLB59jKhmbBmOq/DZ5EEJyOZRIHDZ6f50 +JvTm1NJZNQ66ZE2elUrFbgYqArCzimKvj+INmOz7CICotSL+6xPPLwq+69w+mw5Y +jfNJ0T74W6q8yZcAooSlybxwOjCd3RmaV8Qo4eBc/ew3UYPP1Kd96C9qxsqTPSvi +iNZucGvTOQV70p3vSSzCv0Doyi+mrTcrE9UITD01urx5zWPrvn9a8hhvtYtLYkOs +5kWGqgWLu/pvT+dD5jKU0yh6az90j/b/g7fjk1vVO2XHaTYs7gO60hSUWfPKC6W6 
+cqeywNPcnv+bYZtoUwpdOLI4S7UwLTOrFxF3qjr0CV8vSzk0PuBIrTr0Xpwx1Q9z +NyJNInrZAEXiabxqruFHziylACSXsNXuCFo4FtpZkpmx4sABfNn1BvwF6lemy6iV +sH0I9mXtsao7NodERGrHSGjyDaohFlgmUXrIzypDcnGc32XoE7+AvYAiYgAOdzX/ +7rRzTDTlTv5ss9fHs2jtKSPERNx124Mj2K9VX7E6KGuy193cZyzhAEmrYKOaKOY1 +5jyX2hiQleB1xjOWg4cNhZTdEoGP4DTcQ13Egx6khUBALTTJhcJU/FX0JQJb7kCp +7/A83zaNAkcHzc4FdnhF08VECQRn/ZUvU2e342AsmxZv9opI7CSD9UjfCXYvfXnN +od7wrLGsBUChrVHLDGst22GM3vIFwC6gAKvCYrf3WL8wFO3KJGXT8AyGHijV5OyG +X7OeFHJCWoKU63G24FGHm4rKRW9S0mn1sxYVfhf5THjJXs8RpsUsH3+G6d3paSb7 +v58VAEkW2p2dyLA2cjqHLaaGJBgYuqyb/hP2kVu+lUMnzcyabncrcq0g0anvrA4F +R+WdNtV6KvWYToiOBQcpl0176f/11dGU/sWtGYkbYF8JXhf0GUVadM4VAC6BCWtG +fC4TTN0FuL1ZJjRVf3mLGOTQfPzag8Y0b2JxvdQo5dJ5BSZfB42LFj2VEQhKcAM6 +rj9/rJj0TcsG2B8wSrJn8KpvyIXTk8CluxMRSZszYOF44EitBv/gldotIS2uTWNN +8t1dQvHGoV/71U1QfzQW1ovg5B4HGIg3z21ju45bbQYvR2Ay8//vjHQmfvyE/PKN +a2JnX9tf6ElaAkr6/mrvskafHTktE+ttKQwBkwgfFUu5hX74wcKY1/JCq9wOujA5 +HP9xsGpKB/U+M2MHWuCqhCx7hBYeb/9/iJuFLFAPJfr/X3cJq7LRqPNmAJRQd5nf +yZod1fQ4AS0VM2bWeN1+pqDuZlGu5bOJ3S+9/B1AvE+QgAXWWNf6dwS2KvbiHdih +y0EYjldzN+fY1UMt+z7djTq51sdBg/1dS4xxsu8Bxdud1yf0su1TsENfkDI3SsP+ +V8QG0/FjQcA8ajCJSLNo72SJ/vH6QxtQ5fcIbb26vdAPY9ar5/Db2nypeGABpZnF +BNyBhqj95iO3JZf1fbsBdqFPK56NNDVuWqLMSFi9nYBuEeetnY82FNTvlae/Otos +qx5LN0rKoD4q/nLS01lrCOJRv43g3vdPZA3DvQaGO5rt4bkFibWf8bZ2yetHhIIa +dXKfi9tRkTXzYFZlT44DSf439CrVrOf6B8wuy4K2xHWft1jIMVnRuXBjY9fynVUd +ulMMPTi9WGvrwSPgGRlql4i7wXQv/efCrZNgQJnxmNOHofvs6ju8VKRh9jiTBOrd +ei3bOspnIS/kuCylxQUgJe8u+JeRJu+YahVShoCrW2MtiDFP/tYxOZmKVYf1/k4j +aHpulKoukPlr1QlnY8CDaEDXk9S+ZnPjxLjbvgBQaIVrxJj1OQteJK6S04fOjF2j +epyEJJmoFqL1FVVhOKhVBh0M/hq18CN9rCicpVfUVjIRe8zMf97HcfVeSUQ9G9RA +2TVl5qCL16B2SS4LD+41+6kHiKkuiHqXRKekUqDQCFOp1J10thiZWF15EA6ms+39 +ab54P+ZmfzNERyvzTTc8bgEhiepB1y0YrgG6U6MuaYpp+Jq9TPwkAVqkaJbe3vtO +21gUdQFW3mzUlGhWnCDJShKiCPwtF6zYfwcG4yERxoNUYCm+JYnRgRIpMQXyARyk +inbmp1mc+DJY1CrfQu02mrcvRDopfndyxruIt/RKdTpFqh8VFp8H0fCQWJdfy4Gr +zSx8rRZBxL7I9w7tWXXWuRcaZGAUEAFZYM4PN1aAcpwXt3rePli5Wr9Lg/5dFLyk 
+fbhaswP2i/x3HZlOZB5xb25mW+c7PeSGbx8A0zrGS+/oRy7Zy4ONZ2cNy2PEg0x+ +CimyDQqdrnkA8r4EYndlprsKhq9r5VSuzgfZwTYHqAzPkWLDGbBH1NC2vBM0Jwz7 +5y4dctP39I7s2K+E31+xVTOlpY7xhhvXjERmjIOKQ+dCNP87ay3fxd4pmM73i7L5 +qFkvNdgDe3bIfOsoiXFpM462cAyYiX9otH+eyvE3O5CTbYO3jLA7DhnYttGB2XRz +Povj8ODHY6F8P8HNVPU0wl18f/cqvdkhrndOfAocOdPKTUgmw1OW6r8gw/AeAoY9 +q49plnYTPKRNyk0u+kKmcudzMpzvzbE+F4t4iTYWI3NzdUNdljPcpKz6PegYw9yU +DtXYS85a/PSLfaAuoWHh7HkLL34Oles2a/7afyKRaLdefoMlTfMGAJn6smy3wVTK +APr5Rl2PZkhVDEeslB/vAd7We7oYo7JVyrpT2NOT9kYxCmvONnFu1jDJ3x3vg12I +idfOnfNTmhxab1Zsil7aIoqWgE4UQh5CsbChzGDcMZCGFbnP3hxRVPxFR6GCfSVP +Ja6W0ZWZCK1cXkayz9PywlQ5JdJKmjdH1qHci8arBPp89OTL9CL9k+bOPTvp5UvD +3KMEaxoumwsrCw92OO3nptHLXv2mcaFGC/Y1YEvvv0Bpb4lD56Kv/6Uizv5tud2E +0WWY63fJ+31C9pweMAZVMEHZwSI1iq5fgNfFGMnS1h8dxJLolozEgHZY8lTaL4fx +BazgRGbsEX/Qb93Ld91ZXXXLgMcBNN5shsa7K/IsmMUGWtiUzlUi4tBIit5QmOvj +ICQ7UxkzLE/LGat/8tk8823NSrLHMTetjhpHY8PTQf497E+rAnIvcHlZok3HTiHX +YMxGSJxOJI2ff0x3byiwZPf/dgZwK3/LDn5ck4LZx1qXdY6/3Dg6vFxCP4avJdnj +rpJk5BstSwca7l8d7PwfAVz9tKYxzhSBDcnk06o728KNfdYALy6h05jSbxq3d6Gm +0+Mh+ydr9E9uRT913TOVcsyXVwRrIHqBOJfxlyG/0HYKpN36WfXMDPeDZAzZ6Xbq +7SPjjVhEyPCK/gCOHDriBppWHDn/GhGwS8Jv+fWiSyLmqSSXtgO1oxrGo19ugV1S +K3or6AbePviDUkk/1NRDokMyHeE7TWZmzllmiZ2d2rwI+/+l43zWitStRW7xH9Nd +YFo343oWHkX/se1jB3EKHmSmDmLa4etmKXN1oIxzl7lRedRMfIb8RtqZ2lX0pcw8 +UwRcVAe6tMqRvUNMArZO55AUJ/3PyFL0m/OaxUqDt8JzsM2i4V4qfnoELo8vGmzq +UWYyDlcpPBznvEPRGa6mDq5359VIFAQGlySpXmFIwoTH3EkirY3j7DnmFC1d5kvM ++JggJElqkY/QUM0BrXrmUEzKH2AxjfffUKJNBrufTnkzyJLxasUbBO5f5wiIMLXU +IlEZaepep5tcgIkQsL5Kff9p5vIL2IrikhmdYCX9Npy8g3Ks+18mPXGb5wG2rgts +ZzDg7y0tyjMeLM7YZzHDjMD4qqIOmirEgfHxEGgLYn+fW4N1JiftzmU8dH9Z/Edr +xnNT+uxlNboT3e/QMD00/xDsI4vl8Wknk3YQEGvD6a89sBUBkEg4sAUMA+WZfaUP +Z/D8M9vhDsR1V70LtOWA6fld07az6JBer+A8M1Wq55hjVrR47DNa4xy3CeTxqLji +Yk3Dp4KAWJMAR14i2aLWTSshwkdHg8Sy1adCvrR/NdwOiBQq3uUAuA8jiCvSxQTu +LVU/atAAtOFRIGWitiDDn5l2Zyqqeo4dgHlB51cNvv0xRc0csi/ijddguEX+Ok10 +xbhylmCKDh3h7Lh1fA7kTdfW6joTSnIvJHJR4BRU7+41bLkxWZfLpkRivvFGKFm/ 
+lBx2HPZo9DAa5owqkNb0NzINWwSlgR3NQ8wNGmjHsrKiHGzi3vgNlYvlX653X71l +2n/5iDmmPR0uxJJm2nux2BhzYHr8rT09FLJOElSJLv5yDKkrgkNm9HNdFB6ZDsEw +as3rz31RjCnjp6SRUcw/+cjVrPklN/CFH3t44OAUnF+zGCrMAZ2Gvu7pStnYEf4q +cpLyw/KxCJSwOrwp4+Oe08y+6YH8ja+HXzUlsIrtJRaBdKnXbzUNX43Hz/0Ku2ov +D71ID06Oj26FWpE0EzfAKzPXxwKOMOK0I588v1EzBxMPMAkX5vKNv1ibs0V1whYl +ZdQd4Cyudq0NPSzr9TGRlCd6i4YsNqfkvXqPjwuXTNxzLZTxhUFIrkOwFKZ3UzeD +dLaksE4Aknpe699BHwfbDDk5Sb5tXl0VHZvSoCSckLHryULXysINJKEWbPDiap1S +nOxpJUTd5FI6bOVM1UzHtg4E+M7n1+zCIjKRi+1JFKTzvzKJEWyhIwaSdzyCScI3 +Pl7DsleNI9cnAObdHSg/kZpqJyO1NUgHm3X1KXoI3A/NtmCOfIcg1vceur5G0ZiY +SWEYvKgqNqZ+FpxgQuTt1hXkyaLyvFs1k54MTurPE3ht3oZP/FvDP0YS9W7U3yIB +5CrRWMOKq8j91ollzBwCPuGQ7+TSSHVVJafkYsAQdVe5y5rdMcWfHLN3U9hDQnlJ +jSii/4+AtuUR95BVojtIaw/FcfH+LS1Wnersy3SGNJ2j0wMSwy9oqFAMdWYFGjZU +iMNMt3BFxAaQwLOz+WWAFh6PMissdM5B5OLzYxZ7gQ+0ohYp7pO+snwfQIQHjzJM +CC33CqOjiB1bJP0tCnPUCidXwuqHn78o8hzesexx9HRbtScdZehj5R0ccyq5yY4C +qAMj6mTvrQ2/EKalfMnFS+UmGyD9W+ZkgMF62HIh+0x2Kce4e32mkWji9MRFBtL9 +Yhn6qMciA8noDdaQb/lnDgI/kBMNUSFsQcCynkHffvRWumaSm2+e55Mga8LpLpMb +47JcYBhcNn02S8znhP15z3b96SkRh7xHAGc5ALpIy/k8GNjr/b/bWACy2npliLQz +GFqwMhRSNmiKZ4v1CPaGE+2/Dy5DWpq/7sobQFNCOnnDO/BYX5/vZYzLZF37chSd +FaPZa/0pXiGr3z3O7xZGx9rLon2TgITzoHIxw/vSNt5saI+/iyeGfmN7EnfTzNfd +Iy9DTS5FqVddAdFtPc407zb8AsuRVW5Hw38Fri4B1Vu/JflJPfIGr2HIgll4wsFN +JKtzYcx34/Xldak4wNfPIlj/UoQ3zFjj5Ov/01MDO3cafvoL5l313W/g2v9k+J5k +iLA8gu693kAqH6zL3Zn0jCS7aIoTdN8P49GHs5xGAMR3n65Kw/Ow+9v8KrQ6JRk3 +ugMqqg6wsC07SiJ+zJOsN2HnYCX4xhI8RnDixzdYxWx7kmtMbgLedzhafgMoHx+v +E3vWnDegioTGMuoIjFGpxu/3PpL+tkHypx8AWz0PLumYsj+8KlV4haNBu6v6w1Q0 +rtkZ3NE6ywu82mVrMiD+wUOx5F0cqBp8IUOzMdUmJmz+NQxepBSXLJySFqHOBUGh +yRvVLtVefdg3UqyW2oiL3jNRUksZmZDEcM9djhWNJE7wmbIAoE/gH5fWulavMvp+ +3MVdS0KXmQGXiXqK4VF4yspoSdPsmG8VnZO3YRH+FEJPzjV8oN0LaAMAHNrrw+YN +4j/V4pJkVSdEYVyMJq1w+rICEds1KG6XGngryyh6OlR6kdQDzcUDm29IY8Ml603K +/LeG1roUjniL62u1UeZngyZilY4bi1FETg7+ckCwfmAwLyH8SJFEvmpPK/H2NrF9 +w/AE+QHTL8BNDbM4NBgqmqfrKggFFf/eFE7AxrUceMZZXBfG9N8DfhOiyt70JjSj 
+9+UjBeRLuZ/JhjrKo01bFPjtbQFoy/2yo2IqYPYCo8G6VN2y/qQKHLs7IQ/zyShQ +XNhPezFO3P0wpw4QpDfkQJVyrCEzoEohmlCoiSelFgMhHywvFowLA3xHNM519O7i +ZROF16uDE3qcOcIPQA4Me4g5ZCM8aouWwRbV45zpRMV4gnMoCBp4VUIrnkXQmfHv +hlV5uZZE9PB5Ms6Xb9GPRbpFkTbFXaan2PoetESI+cfw3HtjSdUv2w== From 81a8d29c5bd4f2e406a78402f48aa3d9d6397a3e Mon Sep 17 00:00:00 2001 From: Demetri Date: Wed, 18 Jan 2023 23:10:41 -0800 Subject: [PATCH 020/413] Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos Signed-off-by: ddimatos --- galaxy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/galaxy.yml b/galaxy.yml index 2ce8af29e..b8c9397ee 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -81,7 +81,7 @@ build_ignore: - tests/requirements.txt - test_config.yml - changelogs - - venv + - venv* - make.env.encrypt - Makefile - make.env From 239c04737364502fc49b5929c3f973d1544f9c17 Mon Sep 17 00:00:00 2001 From: Demetri Date: Thu, 19 Jan 2023 09:14:34 -0800 Subject: [PATCH 021/413] Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos * remove redundant entry on last line Signed-off-by: ddimatos * remove redundant entry on last line Signed-off-by: ddimatos Signed-off-by: ddimatos --- meta/runtime.yml | 2 +- tests/sanity/ignore-2.10.txt | 3 ++- tests/sanity/ignore-2.11.txt | 1 + tests/sanity/ignore-2.12.txt | 1 + tests/sanity/ignore-2.13.txt | 1 + tests/sanity/ignore-2.14.txt | 1 + tests/sanity/ignore-2.9.txt | 3 ++- 7 files changed, 9 insertions(+), 3 deletions(-) diff --git a/meta/runtime.yml b/meta/runtime.yml index 0798808bc..dbba1c7ce 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9,<2.12' +requires_ansible: '>=2.9,<2.15' diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index c362873c0..51e13b014 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -58,6 +58,7 @@ 
plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported @@ -81,4 +82,4 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited 
plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 
plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index c362873c0..51e13b014 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # 
Python 2.6 is unsupported @@ -81,4 +82,4 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file From 20bdea300d13ff67b83563b065cb74ba24203dcd Mon Sep 17 00:00:00 2001 From: Demetri Date: Fri, 20 Jan 2023 11:42:24 -0800 Subject: [PATCH 022/413] Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 428f5d602..66c00acda 100644 --- a/Makefile +++ b/Makefile @@ -37,8 +37,11 @@ CURR_DIR := $(shell pwd) WHO := $(shell whoami) HOST_PYTHON = python3 -VENV = venv +# VENV = venv +# VENV := $(shell echo $$VENV) +VENV := $(shell echo "$${VENV:-venv}") VENV_BIN=$(VENV)/bin + ZOS_PYTHON_DEFAULT=3.8 ZOAU_DEFAULT=1.1.1 # Test if docker is running @@ -712,4 +715,4 @@ help: # If you have formatting issues; try `cat -e -t -v Makefile`. # ^I represent tabs and $'s represent end of the line. 
# -# If you need to debug your makefile command, use `-nd`, eg `make -nd vstop` \ No newline at end of file +# If you need to debug your makefile command, use `-nd`, eg `make -nd vstop` From 6b9cb62e1b520b04fcb22f5d9d19cad89b270d51 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 3 Feb 2023 00:12:04 -0600 Subject: [PATCH 023/413] Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment --- changelogs/fragments/588-zos_copy-emergency-backup.yml | 5 +++++ plugins/modules/zos_copy.py | 8 ++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/588-zos_copy-emergency-backup.yml diff --git a/changelogs/fragments/588-zos_copy-emergency-backup.yml b/changelogs/fragments/588-zos_copy-emergency-backup.yml new file mode 100644 index 000000000..393a0f50d --- /dev/null +++ b/changelogs/fragments/588-zos_copy-emergency-backup.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system + to its initial state in case of a module failure only when force is false. + (https://github.com/ansible-collections/ibm_zos_core/pull/590) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index f984e9195..9e3c7ad09 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2233,7 +2233,7 @@ def run_module(module, arg_def): # Creating an emergency backup or an empty data set to use as a model to # be able to restore the destination in case the copy fails. 
- if dest_exists: + if dest_exists and not force: if is_uss or not data_set.DataSet.is_empty(dest_name): use_backup = True if is_uss: @@ -2261,7 +2261,7 @@ def run_module(module, arg_def): volume=volume ) except Exception as err: - if dest_exists: + if dest_exists and not force: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) erase_backup(emergency_backup, dest_ds_type) module.fail_json( @@ -2370,7 +2370,7 @@ def run_module(module, arg_def): res_args["changed"] = True except CopyOperationError as err: - if dest_exists: + if dest_exists and not force: restore_backup( dest_name, emergency_backup, @@ -2382,7 +2382,7 @@ def run_module(module, arg_def): err.json_args["dest_exists"] = dest_exists raise err finally: - if dest_exists: + if dest_exists and not force: erase_backup(emergency_backup, dest_ds_type) res_args.update( From 959555d881d45cd93f363bb33235cad974a8aed5 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 3 Feb 2023 00:17:59 -0600 Subject: [PATCH 024/413] Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml --- changelogs/fragments/enhancement-518-text-converter-import.yml | 3 +++ plugins/action/zos_fetch.py | 3 ++- plugins/action/zos_job_submit.py | 3 ++- plugins/module_utils/data_set.py | 3 ++- plugins/module_utils/system.py | 3 ++- plugins/modules/zos_copy.py | 3 ++- plugins/modules/zos_fetch.py | 3 ++- 7 files changed, 15 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/enhancement-518-text-converter-import.yml diff --git a/changelogs/fragments/enhancement-518-text-converter-import.yml b/changelogs/fragments/enhancement-518-text-converter-import.yml new file mode 100644 index 
000000000..691a57273 --- /dev/null +++ b/changelogs/fragments/enhancement-518-text-converter-import.yml @@ -0,0 +1,3 @@ +minor_changes: + - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning ".. warn:: Use ansible.module_utils.common.text.converters instead.". + diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 4423a2985..dd2172fc8 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -17,7 +17,8 @@ import re from hashlib import sha256 -from ansible.module_utils._text import to_bytes, to_text +# from ansible.module_utils._text import to_bytes, to_text +from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.six import string_types from ansible.module_utils.parsing.convert_bool import boolean from ansible.plugins.action import ActionBase diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 7e7c9833f..6dcadad05 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -15,7 +15,8 @@ from ansible.plugins.action import ActionBase from ansible.errors import AnsibleError, AnsibleFileNotFound -from ansible.module_utils._text import to_bytes, to_text +# from ansible.module_utils._text import to_bytes, to_text +from ansible.module_utils.common.text.converters import to_bytes, to_text import os diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 24d6d5500..8cd7199f8 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -18,7 +18,8 @@ from os import path, walk from string import ascii_uppercase, digits from random import randint -from ansible.module_utils._text import to_bytes +# from ansible.module_utils._text import to_bytes +from ansible.module_utils.common.text.converters import to_bytes from 
ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 5bb5fed20..90b9d1013 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -17,7 +17,8 @@ from sys import platform as SYS_PLATFORM from subprocess import Popen, PIPE from ansible.module_utils.six import binary_type, text_type, PY2, PY3 -from ansible.module_utils._text import to_text, to_bytes +# from ansible.module_utils._text import to_text, to_bytes +from ansible.module_utils.common.text.converters import to_bytes, to_text from shlex import split diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9e3c7ad09..7a32b1bd4 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -664,7 +664,8 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) -from ansible.module_utils._text import to_bytes +# from ansible.module_utils._text import to_bytes +from ansible.module_utils.common.text.converters import to_bytes from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import PY3 from re import IGNORECASE diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index a930e3458..dd43310b9 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -275,7 +275,8 @@ from math import ceil from shutil import rmtree from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_bytes +# from ansible.module_utils._text import to_bytes +from ansible.module_utils.common.text.converters import to_bytes from ansible.module_utils.parsing.convert_bool import boolean from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, From 6ef9a97a72f399b1ebaa257c112910a5ae07523a Mon Sep 17 00:00:00 2001 From: Demetri Date: Mon, 6 Feb 2023 15:33:13 -0800 
Subject: [PATCH 025/413] Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos * lexicographical order targets Signed-off-by: ddimatos * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos --------- Signed-off-by: ddimatos --- Makefile | 992 ++++++++++++++++++----------------- make.env.encrypt | 562 ++++++++++---------- scripts/mount-shr.sh | 92 ++++ scripts/mount-shr.sh.encrypt | 71 --- scripts/profile-shr | 230 ++++++++ scripts/profile-shr.encrypt | 197 ------- 6 files changed, 1118 insertions(+), 1026 deletions(-) create mode 100755 scripts/mount-shr.sh delete mode 100644 scripts/mount-shr.sh.encrypt create mode 100755 scripts/profile-shr delete mode 100644 scripts/profile-shr.encrypt diff --git a/Makefile b/Makefile index 66c00acda..4f1f6f58e 100644 --- a/Makefile +++ b/Makefile @@ -10,24 +10,26 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Makefile is used to assist with development tasks that can be a bit tedious to -# create and often recreate. This provides a simple repeatable means to perform -# regular development actions and encourages better practices by simplifying -# tasks -# This makefile relies heavily on a paired shell script `make.env` which should -# not be renamed. The contents of the `make.env` are encrypted to adhere to -# coporate operational requiements. The format will be published should you wish -# to edit or create your own version of `make.env`. If you need to edit the -# `make.env` be sure to use this makefile to manage it by: +# Makefile is used to assist with development tasks such as running tests cases +# or setting up a python virtual environment. +# This makefile relies on shell script `make.env` which should not be renamed. 
+# The contents of the `make.env` are encrypted to adhere to coporate operational +# requiements. If you need to edit the `make.env` be sure to use this makefile +# to access the script: # (1) make decrypt -# (2) vi/edit the contents as needed -# (3) make encrypt +# While of some of the targets work without a venv, it's higly recommended you +# instruct make to create you a venv where it will perform operations: # (1) make vsetup -# (2) make build -# (3) make bandit sev=ll -# (4) make sanity version=3.8 -# (5) make test host= python= zoau= name= debug=true +# Optionally you can override the makefile's env var VENV to instruct it to +# create a `venv` based on your requiements.txt, you can do this by: +# (1) export VENV=venv-2.11 +# (2) make vsetup req=requirements-ac-2.11.12.txt +# Now all make targets will use the venv you assigned to the exported variable +# and also a directory `venv-2.11` will be created and populated with files used +# by make. You may consider pyvenv so that you can change your python versions +# to meet the needs of the various ansible-core versions. # ============================================================================== # ============================================================================== @@ -46,85 +48,170 @@ ZOS_PYTHON_DEFAULT=3.8 ZOAU_DEFAULT=1.1.1 # Test if docker is running DOCKER_INFO := $(shell docker info> /dev/null 2>&1;echo $$?) + +# Unit test to skip +SKIP = tests/functional/modules/test_module_security.py divider="====================================================================" .PHONY: help Makefile +# ============================================================================== +# Makefile +# ============================================================================== -## Encrypt the configuration files with a `.encrypt` suffix for files -## [make.env, mount-shr.sh, profile-shr] with user specified password. 
-## If no password is provided, you will be prompted to enter a password for each -## file being encrypted. +# ============================================================================== +# Run a bandit security scan on the plugin directory +# ============================================================================== +## Run a bandit security scan on the plugins directory, set the severity level. +## Options: +## level - choose from 'l', 'll', 'lll' +## - l all low, medium, high severity +## - ll all medium, high severity +## - lll all high severity ## Example: -## $ make encrypt password= -## $ make encrypt -## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. -encrypt: - @# -------------------------------------------------------------------------- - @# Check to see if there is an unencrypted file(s) to encrypt, you would not - @# want to delete the encrypted version if the unecrypted is not present as - @# there would be no recovery process. Then check to see if there an - @# encrypted version of the file, if so delete it. - @# -------------------------------------------------------------------------- - @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ - echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ - rm -rf make.env.encrypt; \ - fi +## $ make bandit sev=ll +## $ make bandit sev=l +bandit: + ifdef sev + @echo $(divider); + @echo "Running Bandit scan with sev=${sev}"; + @echo $(divider); + @. $(VENV_BIN)/activate && bandit -r plugins/* -${sev} + else + @echo "No bandit sev (severity) has been set." 
+ endif - @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ - echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ - rm -rf scripts/mount-shr.sh.encrypt; \ - fi - @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ - echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ - rm -rf scripts/profile-shr.encrypt; \ - fi +# ============================================================================== +# Build the current collection based on the git branch local to the computer. +# Currently, venv's only manage python packages, collection installation is managed +# with paths, if we wanted to install it in the venv to not dirty the host, we +# could try building a similar command to python's venv: +# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections +# ============================================================================== +## Build and install a collection of the current branch checked out +## Example: +## $ make build +build: + @echo $(divider) + @echo "Building Ansible collection based on local branch and installing." + @echo $(divider) - @# -------------------------------------------------------------------------- + @# Encrypt the files since we have verified the uncrypted versions exist + @# Note: we should move make.env to scripts as well + @# -------------------------------------------------------------------------- + @. 
$(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ + ansible-galaxy collection build && \ + ansible-galaxy collection install -f ibm-ibm_zos_core-* - ifdef password - ifneq ("$(wildcard scripts/mount-shr.sh)","") - @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh - endif +## Build the changelog, this should be a release activity otherwise the generated +## files should not be checked in. +## Example: +## $ make buildChglog +buildChglog: + @. $(VENV_BIN)/activate && antsibull-changelog release + + +## Update the documentation for the collection after module doc changes have been +## made. This simply calls the make file in the docs directory, see the make file +## there for additional options. +## Example: +## $ make buildDoc +buildDoc: + @. $(VENV_BIN)/activate && make -C docs clean + @. $(VENV_BIN)/activate && make -C docs module-doc + @. $(VENV_BIN)/activate && make -C docs html + @. $(VENV_BIN)/activate && make -C docs view-html + + +# ============================================================================== +# Cleanup and teardown based on user selection +# ============================================================================== +## Cleanup and teardown the environment based on the level selected. +## Options: +## level - choose from 'min', 'all' +## - 'all' will remove the venv, restore any temporarily located files +## and ensure config is encrypted +## - 'min' will restore any temporarily located files +## and ensure config is encrypted +## Example: +## $ make clean level=all +## $ make clean level=min +clean: + ifdef level + ifeq ($(level),all) + @echo $(divider) + @echo "Complete teardown selected." 
+ @echo $(divider) - ifneq ("$(wildcard scripts/profile-shr)","") - @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr + @echo $(divider) + @echo "Deleting python virtual environment 'venv'." + @echo $(divider) + @rm -rf $(VENV) endif - ifneq ("$(wildcard make.env)","") - @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env + ifeq ($(level),min) + @echo $(divider); + @echo "Minimum teardown selected."; + @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; + @echo $(divider); + @rm -rf $(VENV)/make.env + @rm -rf $(VENV)/mount-shr.sh + @rm -rf $(VENV)/profile-shr endif + @if test -e tests/functional/modules/test_module_security.txt; then \ + echo $(divider); \ + echo "Restoring 'test_module_security.py', previously removed to avoid execution."; \ + echo $(divider); \ + mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ + fi + + # Unsure really need or even want to do this as part of cleanup + # @if test -e make.env; then \ + # echo $(divider); \ + # echo "Found uncrypted files, encrypting them."; \ + # echo $(divider); \ + # make encrypt; \ + # fi else - ifneq ("$(wildcard scripts/mount-shr.sh)","") - @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh - endif + @echo $(divider) + @echo "Default teardown, deleting $(VENV)" + @echo $(divider) + @rm -rf $(VENV) + endif - ifneq ("$(wildcard scripts/profile-shr)","") - @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr - endif - ifneq ("$(wildcard 
make.env)","") - @openssl bf -a -in make.env -out make.env.encrypt - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env - endif +## Cleanup and remove geneated doc for the collection if its not going to be +## checked in +## Example: +## $ make cleanDoc +cleanDoc: + @. $(VENV_BIN)/activate && make -C docs clean + + +## Copy your ssh key to a `host` or the default which is your username. If you are +## copying a key to a production server, a second key will be copied used by the +## jenkins node, this minimizes the number of times you must copy a key. You must +## have set up a venv `venv` as that is where the environment script and configurations +## get written to manage this make file. It avoids continued decryption prompts to +## force users to set up the venv via `vsetup` +## Options: +## host - choose from a known host or don't set a value for the default operation +## which is to user your username to look up your default system +## Example: +## $ make copyKey host=ec33012a +## $ make copyKey +copyKey: + @echo $(divider) + @echo "Copying SSH keys to the managed node authorized_keys." + @echo $(divider) + + ifdef host + @${VENV}/./make.env --cert ${host} + else + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --cert ${username} endif + ## Decrypt all scripts used with this Makefile using the user specified password ## Files include: ["mount-shr.sh", "profile-shr", "make.env"] ## If no password is provided, you will be prompted to enter a password for each @@ -136,15 +223,15 @@ decrypt: @# -------------------------------------------------------------------------- @# Check configuration files exit @# -------------------------------------------------------------------------- - @if test ! -e scripts/mount-shr.sh.encrypt; then \ - echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ - exit 1; \ - fi + #@if test ! 
-e scripts/mount-shr.sh.encrypt; then \ + # echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ + # exit 1; \ + #fi - @if test ! -e scripts/profile-shr.encrypt; then \ - echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ - exit 1; \ - fi + #@if test ! -e scripts/profile-shr.encrypt; then \ + # echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ + # exit 1; \ + #fi @if test ! -e make.env.encrypt; then \ echo "File 'make.env.encrypt' not found in $(CURR_DIR)"; \ @@ -155,271 +242,139 @@ decrypt: @# Decrypt configuration files @# ------------------------------------------------------------------------- ifdef password - @echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin - @chmod 700 scripts/mount-shr.sh + #@echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin + #@chmod 700 scripts/mount-shr.sh - @echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin - @chmod 700 scripts/profile-shr + #@echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin + #@chmod 700 scripts/profile-shr @echo "${password}" | openssl bf -d -a -in make.env.encrypt -out make.env -pass stdin @chmod 700 make.env else - @openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh - @chmod 700 scripts/mount-shr.sh + #@openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh + #@chmod 700 scripts/mount-shr.sh - @openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr - @chmod 700 scripts/profile-shr + #@openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr + #@chmod 700 scripts/profile-shr @openssl bf -d -a -in make.env.encrypt -out make.env @chmod 700 make.env endif -# 
============================================================================== -# Set up your venv, currently its hard coded to `venv` and designed to look first -# to see if you have one before trying to create one. -# @test -d $(VENV) || $(HOST_PYTHON) -m venv $(VENV) -# ============================================================================== -## Create a python virtual environment (venv) based on the systems python3 -## Options: -## req - a user provided requirements.txt, if this is not set one will be -## created for you. -## Example: -## $ make vsetup -## $ make vsetup req=tests/requirements.txt -vsetup: - @# ------------------------------------------------------------------------- - @# Create the virtual environment directory if it does not exist - @# ------------------------------------------------------------------------- - @if test ! -d $(VENV); then \ - echo $(divider); \ - echo "Creating python virtual environment directory $(VENV)."; \ - echo $(divider); \ - $(HOST_PYTHON) -m venv $(VENV); \ - else \ - echo "Virtual environment already exists, no changes made."; \ +## Encrypt the configuration files with a `.encrypt` suffix for files +## [make.env, mount-shr.sh, profile-shr] with user specified password. +## If no password is provided, you will be prompted to enter a password for each +## file being encrypted. +## Example: +## $ make encrypt password= +## $ make encrypt +## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. +encrypt: + @# -------------------------------------------------------------------------- + @# Check to see if there is an unencrypted file(s) to encrypt, you would not + @# want to delete the encrypted version if the unecrypted is not present as + @# there would be no recovery process. Then check to see if there an + @# encrypted version of the file, if so delete it. 
+ @# -------------------------------------------------------------------------- + @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ + echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ + rm -rf make.env.encrypt; \ fi - @# ------------------------------------------------------------------------- - @# Check if files exist in venv, if they do we should not decrypt/replace - @# them as they could have edits and risk losing them. - @# ------------------------------------------------------------------------- + # @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ + # echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ + # rm -rf scripts/mount-shr.sh.encrypt; \ + # fi - @if test ! -e $(VENV)/make.env && \ - test ! -e $(VENV)/mount-shr.sh && \ - test ! -e $(VENV)/profile-shr; then \ - echo $(divider); \ - echo "Decrypting files into $(VENV)."; \ - echo $(divider); \ - make decrypt; \ - mv make.env $(VENV)/; \ - mv scripts/mount-shr.sh $(VENV)/; \ - mv scripts/profile-shr $(VENV)/; \ - else \ - echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ - fi + # @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ + # echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ + # rm -rf scripts/profile-shr.encrypt; \ + # fi - ifdef req - @if test -f ${req}; then \ - echo $(divider); \ - echo "Installing user provided python requirements into $(VENV)."; \ - echo $(divider); \ - cp ${req} ${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - fi - else - @if test ! -e $(VENV)/requirements.txt; then \ - echo $(divider); \ - echo "Installing default python requirements into $(VENV)."; \ - echo $(divider); \ - echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ - . 
$(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - else \ - echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ - fi - endif + @# -------------------------------------------------------------------------- + @# Encrypt the files since we have verified the uncrypted versions exist + @# Note: we should move make.env to scripts as well + @# -------------------------------------------------------------------------- -# ============================================================================== -# You don't need to activate your venv with this Makefile, but should you want -# to, you can with vstart. -# ============================================================================== -## Start the venv if you plan to work in a python virtual environment -## Example: -## $ make vstart -vstart: - @echo $(divider) - @echo "Activating python virtual environment 'venv', use 'vstop' to deactivate." - @echo $(divider) - @. $(VENV_BIN)/activate; exec /bin/sh -i - -# ============================================================================== -# Deactivate your venv -# ============================================================================== -## Deactivate (stop) the venv -## Example: -## $ make vstop -vstop: - @echo $(divider) - @echo "Deactivate python virtual environment 'venv'." - @echo $(divider) - @. deactivate - -# ============================================================================== -# Build the current collection based on the git branch local to the computer. 
-# Currently, venv's only manage python packages, colleciton installation is managed -# with paths, if we wwanted to install it in the venv to not dirty the host, we -# could try building a similar command to pythons venv: -# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections -# ============================================================================== -## Build and installa collection of the current branch checked out -## Example: -## $ make build -build: - @echo $(divider) - @echo "Building Ansible collection based on local branch and installing." - @echo $(divider) + ifdef password - @. $(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ - ansible-galaxy collection build && \ - ansible-galaxy collection install -f ibm-ibm_zos_core-* + #ifneq ("$(wildcard scripts/mount-shr.sh)","") + # @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin + # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + # @rm -f scripts/mount-shr.sh + #endif -# ============================================================================== -# Run functional tests: -# ============================================================================== -## Run collection functional tests inside the python virtual environment (venv) -## Options: -## host - z/OS managed node to run test cases, no selection will default to -## a system registerd to your user name, see make.env -## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11 -## no selection defauls to 3.8 -## zoau - Z Open Automation Utilites to use with the collection, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1 -## no selection defaults to 1.1.1 -## name - the absoulte path to a particluar test case to run, no selection -## will default to all test cases running. 
-## debug - enable debug for pytest (-s), any value will result in true enabling -## debug, default is to not define a value so that it evaluates to false -## Example: -## $ make test (runs all tests using default users system and dependencies) -## $ make test name=tests/functional/modules/test_zos_copy_func.py debug=true (run specific test and debug) -## $ make test host=ec33012a python=3.9 zoau=1.1.1 name=tests/functional/modules/test_zos_copy_func.py debug=true -test: - @# -------------------------------------------------------------------------- - @# Expecting the zOS host, python version and zoau version to use with - @# generating a configuration for us with zTest helper. - @# -------------------------------------------------------------------------- + #ifneq ("$(wildcard scripts/profile-shr)","") + # @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin + # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + # @rm -f scripts/profile-shr + #endif - ifdef host - ifdef python - ifdef zoau - @echo $$(${VENV}/./make.env --config ${host} ${python} ${zoau})>$(VENV)/config.yml - else - @echo "Option 'zoau=' was not set, eg zoau=1.1.1" - @exit 1 - endif - else - @echo "No python version option was set, eg python=3.8" - @exit 1 + ifneq ("$(wildcard make.env)","") + @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env endif - else - @# -------------------------------------------------------------------------- - @# When a quick test with no options and defaults are acceptable, a - @# lookup using the users usersname is mapped to a default of known - @# zos targets registered in make.env - @# -------------------------------------------------------------------------- - - $(eval username := $(shell whoami)) - echo $$(${VENV}/./make.env --config ${username} ${ZOS_PYTHON_DEFAULT} 
${ZOAU_DEFAULT})>$(VENV)/config.yml - - endif - - @# -------------------------------------------------------------------------- - @# Check configuration was created in venv/config.yml, else error and exit - @# -------------------------------------------------------------------------- - - @if test ! -e $(VENV)/config.yml; then \ - echo "No configuration created in $(VENV)/config.yml "; \ - exit 1; \ - fi - - @# -------------------------------------------------------------------------- - @# Check if name='a specific test' and if debug was set, else run all tests - @# -------------------------------------------------------------------------- - - @if test -e tests/functional/modules/test_module_security.py; then \ - mv -f tests/functional/modules/test_module_security.py tests/functional/modules/test_module_security.txt; \ - fi - ifdef name - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest $(name) --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest $(name) --host-pattern=all --zinventory=$(VENV)/config.yml - endif else - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. 
$(VENV_BIN)/activate && $(VENV_BIN)/pytest --host-pattern=all --zinventory=$(VENV)/config.yml + #ifneq ("$(wildcard scripts/mount-shr.sh)","") + # @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt + # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + # @rm -f scripts/mount-shr.sh + #endif + + #ifneq ("$(wildcard scripts/profile-shr)","") + # @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt + # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + # @rm -f scripts/profile-shr + #endif + + ifneq ("$(wildcard make.env)","") + @openssl bf -a -in make.env -out make.env.encrypt + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env endif endif - @if test -e tests/functional/modules/test_module_security.txt; then \ - mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ - fi # ============================================================================== -# Run the sanity test using docker given python version else default to venv +# Self documenting code that when comments are created as expected, the help +# is auto generated. Supports multiline comments when comments are prefixed with +# 2 pound signs and a space, see examples in this makefile. # ============================================================================== -## Run sanity tests either in the virtual environment (venv) or docker if there is a running docker engine -## Options: -## version - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', no selection will run all available python versions -## Example: -## $ make sanity version=3.8 -## $ make sanity -sanity: - ifeq ($(DOCKER_INFO),0) - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements --docker default && \ - cd $(CURR_DIR); - else - @. 
$(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements --docker default && \ - cd $(CURR_DIR); - endif - else - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements && \ - cd $(CURR_DIR); - else - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements && \ - cd $(CURR_DIR); - endif - endif +## Help on how how to use this Makefile, options and examples. +help: + @awk '{ \ + if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \ + helpCommand = substr($$0, index($$0, ":") + 2); \ + if (helpMessage) { \ + printf "\033[36m%-20s\033[0m %s\n", \ + helpCommand, helpMessage; \ + helpMessage = ""; \ + } \ + } else if ($$0 ~ /^[a-zA-Z\-\_0-9.]+:/) { \ + helpCommand = substr($$0, 0, index($$0, ":")); \ + if (helpMessage) { \ + printf "\033[36m%-10s\033[0m %s\n", \ + helpCommand, helpMessage; \ + helpMessage = ""; \ + } \ + } else if ($$0 ~ /^##/) { \ + if (helpMessage) { \ + helpMessage = helpMessage"\n "substr($$0, 3); \ + } else { \ + helpMessage = substr($$0, 3); \ + } \ + } else { \ + if (helpMessage) { \ + print "\n "helpMessage"\n" \ + } \ + helpMessage = ""; \ + } \ + }' \ + $(MAKEFILE_LIST) -# ============================================================================== -# Run a bandit security scan on the plugin directory -# ============================================================================== -## Run a bandit security scan on the plugins directory, set the severity level. -## Options: -## level - choose from 'l', 'll', 'lll' -## - l all low, medium, high severity -## - ll all medium, high severity -## - lll all hight severity -## Example: -## $ make bandit sev=ll -## $ make bandit sev=l -bandit: - ifdef sev - @echo $(divider); - @echo "Running Bandit scan with sev=${sev}"; - @echo $(divider); - @. 
$(VENV_BIN)/activate && bandit -r plugins/* -${sev} - else - @echo "No bandit sev (severity) has been set." - endif # ============================================================================== # Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) @@ -443,19 +398,38 @@ install: @. $(VENV_BIN)/activate && ansible-galaxy collection install -fc ibm.ibm_zos_core endif -# ============================================================================== -# Check the version of the ibm_zos_core collection installed -# ============================================================================== -## Get the version of the ibm_zos_core collection installed + +## Copy your ssh key to a `host` or the default which is your username. Then +## copy the super share mount script and profile for the mounts, execute the +## mount script and exit, upon rmote ssh, `profile-shr` will be located +## at `/u/${user} where user is defined in the make.env `host_list`. You must +## have set up a venv `venv` as that is where the environment script and configurations +## get written to manage this make file. It avoids continued decryption prompts to +## force users to set up the venv via `vsetup` +## Options: +## host - choose from a known host or don't set a value for the default operation +## which is to user your username to look up your default system ## Example: -## $ make version -version: - @echo $(divider) - @echo "Obtaining Ansible collection version installed on this controller." - @echo $(divider) +## $ make mountProfile host=ec33012a +## $ make mountProfile +mountProfile: + ifdef host + @make copyKey host=${host} + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" + else + @make copyKey + @echo $(divider) + @echo "Copying mount script to managed node and executing." 
+ @echo "Copying profile-shr to managed node." + @echo $(divider) + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr + endif - @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ - |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; # ============================================================================== # Print the configuration used to connect to the managed node for functional tests @@ -474,6 +448,7 @@ printConfig: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi + # ============================================================================== # Print the make.env contents # ============================================================================== @@ -489,6 +464,7 @@ printEnv: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi + # ============================================================================== # Print the make.env contents # ============================================================================== @@ -504,6 +480,7 @@ printMount: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi + # ============================================================================== # Print the make.env contents # ============================================================================== @@ -519,186 +496,235 @@ printProfile: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi -# ============================================================================== -# Cleanup and teardown based on user selection -# ============================================================================== -## Cleanup and teardown the environment based on the level selected. 
-## Options: -## level - choose from 'min', 'all' -## - 'all' will remove the venv, restore any temporarily located files -## and ensure config is encrypted -## - 'min' will restore any temporarily located files -## and ensure config is encrypted + +## Display the z/OS managed nodes available and configured. This will show which +## systems you can use in the host argument for `make test host<....>` ## Example: -## $ make clean level=all -## $ make clean level=min -clean: - ifdef level - ifeq ($(level),all) - @echo $(divider) - @echo "Complete teardown selected." - @echo $(divider) +## $ make printTargets +printTargets: + @${VENV}/./make.env --targets - @echo $(divider) - @echo "Deleting python virtual environment 'venv'." - @echo $(divider) - @rm -rf $(VENV) - endif - ifeq ($(level),min) - @echo $(divider); - @echo "Minimum teardown selected."; - @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; - @echo $(divider); - @rm -rf $(VENV)/make.env - @rm -rf $(VENV)/mount-shr.sh - @rm -rf $(VENV)/profile-shr +# ============================================================================== +# Run the sanity test using docker given python version else default to venv +# ============================================================================== +## Run sanity tests either in the virtual environment (venv) or docker if there is a running docker engine +## Options: +## version - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', no selection will run all available python versions +## Example: +## $ make sanity version=3.8 +## $ make sanity +sanity: + ifeq ($(DOCKER_INFO),0) + ifdef version + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --python $(version) --requirements --docker default && \ + cd $(CURR_DIR); + else + @. 
$(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --requirements --docker default && \ + cd $(CURR_DIR); endif - - @if test -e tests/functional/modules/test_module_security.txt; then \ - echo $(divider); \ - echo "Restoring 'test_module_security.py', previously removed to avoid execution."; \ - echo $(divider); \ - mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ - fi - - # Unsure really need or even want to do this as part of cleanup - # @if test -e make.env; then \ - # echo $(divider); \ - # echo "Found uncrypted files, encrypting them."; \ - # echo $(divider); \ - # make encrypt; \ - # fi else - @echo $(divider) - @echo "Default teardown, deleting $(VENV)" - @echo $(divider) - @rm -rf $(VENV) + ifdef version + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --python $(version) --requirements && \ + cd $(CURR_DIR); + else + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --requirements && \ + cd $(CURR_DIR); + endif endif -## Copy your ssh key to a `host` or the default which is your username. If you are -## copying a key to a production server, a second key will be copied used by the -# jenkins node, this minimizes the number of times you must copy a key. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. 
It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` + +# ============================================================================== +# Run functional tests: +# ============================================================================== +## Run collection functional tests inside the python virtual environment (venv) ## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system +## host - z/OS managed node to run test cases, no selection will default to +## a system registered to your user name, see make.env +## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11 +## no selection defaults to 3.8 +## zoau - Z Open Automation Utilities to use with the collection, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1 +## no selection defaults to 1.1.1 +## name - the absolute path to a particular test case to run, no selection +## will default to all test cases running. +## debug - enable debug for pytest (-s), any value will result in true enabling +## debug, default is to not define a value so that it evaluates to false ## Example: -## $ make copyKey host=ec33012a -## $ make copyKey -copyKey: - @echo $(divider) - @echo "Copying SSH keys to the managed node authorized_keys." - @echo $(divider) +## $ make test (runs all tests using default users system and dependencies) +## $ make test name=tests/functional/modules/test_zos_copy_func.py debug=true (run specific test and debug) +## $ make test host=ec33012a python=3.9 zoau=1.1.1 name=tests/functional/modules/test_zos_copy_func.py debug=true +test: + @# -------------------------------------------------------------------------- + @# Expecting the zOS host, python version and zoau version to use with + @# generating a configuration for us with zTest helper.
+ @# -------------------------------------------------------------------------- ifdef host - @${VENV}/./make.env --cert ${host} + ifdef python + ifdef zoau + @echo $$(${VENV}/./make.env --config ${host} ${python} ${zoau})>$(VENV)/config.yml + else + @echo "Option 'zoau=' was not set, eg zoau=1.1.1" + @exit 1 + endif + else + @echo "No python version option was set, eg python=3.8" + @exit 1 + endif else - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --cert ${username} + @# -------------------------------------------------------------------------- + @# When a quick test with no options and defaults are acceptable, a + @# lookup using the users usersname is mapped to a default of known + @# zos targets registered in make.env + @# -------------------------------------------------------------------------- + + $(eval username := $(shell whoami)) + echo $$(${VENV}/./make.env --config ${username} ${ZOS_PYTHON_DEFAULT} ${ZOAU_DEFAULT})>$(VENV)/config.yml + endif -## Copy your ssh key to a `host` or the default which is your username. Then -## copy the super share mount script and profile for the mounts, execute the -## mount script and exit, upon rmote ssh, `profile-shr` will be located -## at `/u/${user} where user is defined in the make.env `host_list`. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` -## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system -## Example: -## $ make mountProfile host=ec33012a -## $ make mountProfile -mountProfile: - ifdef host - @make copyKey host=${host} - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." 
- @echo $(divider) - @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" + @# -------------------------------------------------------------------------- + @# Check configuration was created in venv/config.yml, else error and exit + @# -------------------------------------------------------------------------- + + @if test ! -e $(VENV)/config.yml; then \ + echo "No configuration created in $(VENV)/config.yml "; \ + exit 1; \ + fi + + @# -------------------------------------------------------------------------- + @# Check if name='a specific test' and if debug was set, else run all tests + @# -------------------------------------------------------------------------- + + ifdef name + ifdef debug + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml -s + else + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml + endif else - @make copyKey - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." - @echo $(divider) - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr + ifdef debug + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml -s + else + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml + endif endif -## Display the z/OS managed nodes available and configured. This will show which -## systems you can use in the host argument for `make test host<....>` -## Example: -## $ make printTargets -printTargets: - @${VENV}/./make.env --targets -## Build the changelog, this should be a release activity otherwise the generated -## files should not be checked in. 
+# ============================================================================== +# Check the version of the ibm_zos_core collection installed +# ============================================================================== +## Get the version of the ibm_zos_core collection installed ## Example: -## $ make buildChglog -buildChglog: - @. $(VENV_BIN)/activate && antsibull-changelog release +## $ make version +version: + @echo $(divider) + @echo "Obtaining Ansible collection version installed on this controller." + @echo $(divider) -## Update the documentation for the collection after module doc changes have been -## made. This simply calls the make file in the docs directory, see the make file -## there for additional options. + @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ + |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; + +# ============================================================================== +# Setup the python virtual environment, the default name is 'venv'. You can +# override the default name by exporting the variable VENV: +# (1) export VENV=venv-2.11 +# (2) make vsetup req=requirements-ac-2.11.12.txt +# @test -d $(VENV) || $(HOST_PYTHON) -m venv $(VENV) +# ============================================================================== +## Create a python virtual environment (venv) based on the hosts python3 +## Options: +## req - your requirements.txt else a default one will be used ## Example: -## $ make buildDoc -buildDoc: - @. $(VENV_BIN)/activate && make -C docs clean - @. $(VENV_BIN)/activate && make -C docs module-doc - @. $(VENV_BIN)/activate && make -C docs html - @. 
$(VENV_BIN)/activate && make -C docs view-html +## $ make vsetup +## $ make vsetup req=path/to/requirements.txt +## +## Override the default virtual environment name 'venv' by exporting var VENV +## $ export VENV=venv-2.11 +## $ make vsetup req=requirements-ac-2.11.12.txt +vsetup: -## Cleanup and remove geneated doc for the collection if its not going to be -## checked in + @# ------------------------------------------------------------------------- + @# Create the virtual environment directory if it does not exist + @# ------------------------------------------------------------------------- + @if test ! -d $(VENV); then \ + echo $(divider); \ + echo "Creating python virtual environment directory $(VENV)."; \ + echo $(divider); \ + $(HOST_PYTHON) -m venv $(VENV); \ + else \ + echo "Virtual environment already exists, no changes made."; \ + fi + + @# ------------------------------------------------------------------------- + @# Check if files exist in venv, if they do we should not decrypt/replace + @# them as they could have edits and risk losing them. + @# ------------------------------------------------------------------------- + + @if test ! -e $(VENV)/make.env && \ + test ! -e $(VENV)/mount-shr.sh && \ + test ! -e $(VENV)/profile-shr; then \ + echo $(divider); \ + echo "Decrypting files into $(VENV)."; \ + echo $(divider); \ + make decrypt; \ + mv make.env $(VENV)/; \ + mv scripts/mount-shr.sh $(VENV)/; \ + mv scripts/profile-shr $(VENV)/; \ + else \ + echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ + fi + + ifdef req + @if test -f ${req}; then \ + echo $(divider); \ + echo "Installing user provided python requirements into $(VENV)."; \ + echo $(divider); \ + cp ${req} ${VENV}/requirements.txt; \ + . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ + fi + else + @if test ! 
-e $(VENV)/requirements.txt; then \ + echo $(divider); \ + echo "Installing default python requirements into $(VENV)."; \ + echo $(divider); \ + echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ + . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ + else \ + echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ + fi + endif + + +# ============================================================================== +# You don't need to activate your venv with this Makefile, but should you want +# to, you can with vstart. +# ============================================================================== +## Start the venv if you plan to work in a python virtual environment ## Example: -## $ make cleanDoc -cleanDoc: - @. $(VENV_BIN)/activate && make -C docs clean +## $ make vstart +vstart: + @echo $(divider) + @echo "Activating python virtual environment 'venv', use 'vstop' to deactivate." + @echo $(divider) + @. $(VENV_BIN)/activate; exec /bin/sh -i + # ============================================================================== -# Self documenting code that when comments are created as expected, the help -# is auto generated. Supports multiline comments when comments are prefixed with -# 2 pound signs and a space, see examples in this makefile. +# Deactivate your venv # ============================================================================== -## Help on how how to use this Makefile, options and examples. 
-help: - @awk '{ \ - if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \ - helpCommand = substr($$0, index($$0, ":") + 2); \ - if (helpMessage) { \ - printf "\033[36m%-20s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^[a-zA-Z\-\_0-9.]+:/) { \ - helpCommand = substr($$0, 0, index($$0, ":")); \ - if (helpMessage) { \ - printf "\033[36m%-10s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^##/) { \ - if (helpMessage) { \ - helpMessage = helpMessage"\n "substr($$0, 3); \ - } else { \ - helpMessage = substr($$0, 3); \ - } \ - } else { \ - if (helpMessage) { \ - print "\n "helpMessage"\n" \ - } \ - helpMessage = ""; \ - } \ - }' \ - $(MAKEFILE_LIST) +## Deactivate (stop) the venv +## Example: +## $ make vstop +vstop: + @echo $(divider) + @echo "Deactivate python virtual environment 'venv'." + @echo $(divider) + @. deactivate # ============================================================================== diff --git a/make.env.encrypt b/make.env.encrypt index f1b9636a2..d07e7032e 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,275 +1,287 @@ -U2FsdGVkX1986PbRMb2EokSrLE9lJ2+nW9OfuyA0vNn39kfHerFqT6axJCldzuZS -6cIbHi/WZtTpwjxUUKChgjSrLtZ9o4IlDPBn5qxMMxtVLhfJmVDwOvUy9NvAtHJz -tfIOOZvvZzDTJ7ewkywkyxk94JseMdn5/GhiHinpOin29cNYZ6cuxeYE5Lihz9tt -3vP/S211Oi1LGYjd5kfuoSrfr/7Pkdhd/nkyiL/r7yOtPtmSeU++D5VQsbLzYfRz -Umo983TNZJ25w3FMsrbZWR/1EPXd5Dv4+S+FAmZES8YSi4lwmnFbHnnJhB9XBV8R -K/+puWCgOH8UiAdymvaaAlG2rRzu8jQtitrOnSbONOCJn0+Fh/wW53JVsGdLjUS6 -a2ZKP+g8G3KR1aJkPG2NdjG+4IzoCaa5G9/YPP+tZI+6rKrPm3piabtxxerp+N4d -fzwWrmz8CL1ICnU/0ySzIGdKYDnF6oB57vRVWanUYknAFq9s+tofGnt0c/T3X9V5 -aOKuX+7XbLEapuvsR4ghd7uYhi6eh6s8vmc9gFYJo39tcU92M2w+8bz51CskwXDM -WEcYLnue9/yUK3fdE6CMTbFtIhlXDw2IAA82rracXoCEPZFtDSvROG0W5WLMb52J -xLE6DJ4nPAPYAfgcj7xzRzir7WRgclrWrnDXCCXlDcxXO38BgZNJ9Cu0+f6Ys3dI -1yyAagxixTJw1u6Etk9ictbr/QQYWhQwqXPkPSrelPbJ5chQrdoxN2MrPuRUm4Ui 
-QNa0ug6eV0bsolaqlwCbbxoqmZlf9Aga1ePoFhMicj3Jzy/8A8NIx1LhiZuLnqiV -QRhnlIVUL1fD3HojXxqVyQVM8pqAb22uzdS881gomH6BEK7B+v7gcYKGcglCMaFW -fUqx0EyHIzBTGtwldrymbshmLgcYlfuYl40eYCF9l0PDN9/azw4xstv91VF85ZR9 -5lZ88Q6/3rMBfDS8ZxwEXDIoJ78giMqjHReaQgUtVUzEgXJyTkXCH8GS5S9Ct7YB -09Gf/e4IU5EYEWO+Y6vCXyIpY23cE/mzBLTDichT4L64chc4qUX7ogr7YvEvU9LS -Ga1OOaS4mJHqmZUahGa3aDsCx5Aozs3R1Js5Em83Rr6lK0fQVevCqVagbcIrVUls -vOnugf+0wAo3YaOeJypT5JkH4JlwPO5Gfm9YJ6rvvQTugkwp6BfxnRt91oJzPoOU -LgzbLYcYnersdpXoQIvnUPsF7cPxdY5+rS/cllSx+dnoHzqaNrqOkfhn7PScIerT -fGOWrPd5gH4uOKshc/bTybp4lSgbBEQGjD3HUjuFxyfbt63MsqxB58BV25tzmabG -VJq3Z2HbD8xlKWhoplFR/QW7RpQ7yyuzyRbF9a2M4dwSP51XkNMzA30OjvqzWrUu -6s0vDppVM8iTT9XE6SyGSnKEEOIm9XzXEsVD1ZlE38QJxYo1kl6DMPRiDq+okaWl -Kbv0693AZulL2hHXtQvMufxFNNAfoO5jk8Jr6rRVXMpsRRneYn53cyAzMuk17SQD -X4LfN78mOTc/6qadfv3t7ZNCBeT9pEYWfqhed2hk3CzvfdqceX2dimTNySxcNMDu -ukrG6vJfaO16HuVnXDT/V+WDBmEDhaeadrjDS8u0AcBGbtGxjXIHqoMJf8sxEqDV -cRQuAKaRwQcAVbUoF8pwWLvEDBpw47kZCVm6202FwI8DQngWVHlJDCvDoU86n9ks -9WUSuHJSWXoYurLdXU+0HQPGcwEbjvn9GXK8UyPlF3CZmz9RJT5Wr4Wu4p1ZACqD -4QMW8vvST4yuT3mZEndqrSNCvWf1M24jhap4HY6eKSTdHaEAEhIpnbcaR+pBIiH8 -QiK/hTRsTqV5cFQYN3x3hIQQHGIFXgutSmjuIWQQ8kPVize0qE5qdgzWmCIQtqp+ -OVZ1sux46edT2sAN2cwuL4b2sc8G3yMrEA4L6imf8Ea4mRQEQvf4RD5A9Eq36z0K -SJzvva9HHZK7NIMsY4yRt1GWTyNyzApJ4dywzDc6cLvf42O/NSlHJ0NOANDUQ4eS -mbQT+oZHwCWA13W4XAlaLesakfc2E8KFMyIv95j8BeySp8gnrGz51wGMIeLeuDRP -g2CfvXZzPfea7jagkqfMJ1+q7pI/ItrQ7ccrIwfg4gQL7gEE4trGiOvw9RjTC+Lc -wm7Wj6bEVXzONR2izvFnF+PAmdPfV6xMFrGpHaQzUcvqQ5bHIgNLl11IIas2tZH5 -RK0c2K2COaX5ZmRflPUK32vCkRgCJ0x5b61X66q3J6XNxOb91RAU7BKhft9Ud+tn -gT7RWFZNQ11nfui7kplDaLEmQac4dIcYw69n3QobaQgGEzJyNqdKHFW9dJYRRyy5 -cKuHQUl4xvY/AA+/bL1HqXJFAobLQ0O/eginE0lG0qpqOuERizIUsV5ZDYLv4nWi -6qpxpIzkTiipNfyU2jNTZnSebSKlDJTFwsXM4RDHcecqFTRYwtnGQJYzaUnrRGPW -zkUy39VyT7BepcpPVbmi3PSW3LeB0FmIJ3dMihgiAHAa9fuAFItX16VnNuXuTvYf -ylHa04LNjl1iaqSo7vjejLeNrbUplSKAuhvlFVi1PolGjglX95qMyh4KiI/UYCE8 -7YngRWqYWEjlUK94UzYrCBI/snlwXUoMLfGXzFbbPnccPR0q6AbSf6bjNGns6EH7 
-5349eOHjB9kSQsNU2viDxX6TIGi4T6X3Yu9NeE4mbIaU/nC2NjkYMfKDzer2G/aK -xQhC5P1k9LVKvtj4VTQqe+/sfCMvy5hJGvmKJfmT6MzQ2wRtqO+A4xDguNcPc/ti -uduNCcxhs0d4UmbchoSmE03C+oQ+Ql1Hj8OAOMoPeCisBuVhaFNQ7g68t6xVuS0d -YLlqtSVbE12W5mzZ3CRslKDOLzRWFW6vp7AY+eO1vXPg19B0BVLwuSG8t45AIzNd -GkkYiNABfZc7oj4OLi4ONxPzrPvNGuN8tytFtXbL9mAa4v0DagUu2O+4fdnGsShY -FrriLGry1GCd0wWECuZ4TeFB6+qYJs41Ksqe2aK5w4njZABKv4IrqMX3qdzGpzu9 -xS+ob7gzunScYlkV4epfAJIZXZYtf1qiHkK1V3ButA3rlQT0vK5c5UNLJRr+0ebc -DJtlHUaCixQ582mm4rjbu7yOeYek+Cu5Y/MVbHAcGJ6QC6wm5FQSE05pcD54JdZa -9LF6raw+APkNanBW8hJNSFf/ZN17Lf9bkACeq8TKlF6feH6mHOKokxCbQMTeJVLU -/4Z2eRhUajN0mXePITbxiDAvCWImsx0qD8BIX5CStGI0LGK1eAEbQWs3PKrctXm2 -l3RxNMhskAa7KX4NZr7tsd45oa5znMwCKxsLCqLEY8G13fAt6PfFeMWTC2AkrbPo -tRVVFcy7/VONbBl9+OGZjD2ZeOYKy8rqNLxKjwpxjzi0cQWx20NUHPjc+E6m5eiz -pSJoxvydLCyNwgoL5RkjZWHURfIaurb8dQx/08nQeiEtHC8RRxNcrE2nF8u3redw -14LTQNZkx1XLgWxFt/KoCjd/GEDxN9Z9sB9HNYIKxq4RA/bx398SPRYMe7NBJw4j -vshpelYHXb2Mq8jQQBehGEV0cTtr8yHekP2og4EevSXN8bcGZ6+kxADjaixjvg1W -uQL0omvE2rBGxkC5zADmAn4QAbbVhwu3xHU7/1fKFTTjCEumFzY7rvaWz2/Unvb+ -xd+FpwNH+rAzyXB93hJ5ZjmQkkzdOm0YbD/xS0wrPBeaziG4JXAIORSGGUL4QwRx -O0ae7N+cPzQbZAGty7YMo7twPY5IzeE0cF+7MDPirEI6oLfQAyqA84jLoFasQ0nE -aOKE63P92nQY5dxmsWP1YYTiGdRW/vUyC+6lhoufu/KB5gXO+n/HV0Iot6p7dX0X -FfCjBoT8Gco1zFgoa7OuLW5Z0ZaNLBTeYp2j519T0CTKFlXBN2Fd1Cj9hufELgXJ -mMa3Ykey0VO1N/Yv2CeVUR9KlKBT49Ax2EcJmNizIpdcdMJ0oXRSoBjHOPxHwMEG -BCyCh/BhSVOjp4mqkvhVmXHLJu0OV+QeF2A7pKQx3eVCQx1eEkO7mB0JUHFQkxUy -I10dWM77g/MBvOqFNvk8EI3ifeC11l+BXfq7FrX/Ne/MupgJPTllQBEVMGv0+LUE -UXshO4iCaR4UqGz4IN4TLSmVWo+FGGFvfFTd9CocwAf36OGf4p0lMYmceXCL0Ojm -Zr3JMa4XEblDwFQcfjyXxuFkzqGaRjCNn8hXvgabyyyywCBTKL5PLuGYNFsbjE8r -sCdQ3ggh8hdcJAHWMUKvj5PoPOlrHg0ZJLaYEXunpl/VpmJH0gGwP87KG/3MbDX9 -pijHjT0ba58uE2mFDkAz8ZDykdWKwpfoO3wVhZkvsHffxGkTM7hcRdPU9H+aTkdu -wgs3oVAI1MvsvyhWycNXG/Hl+KwgwhJDRbH58kKzjJApfkR72nPmHPeZJd0Ovt7L -FxgOGB5K4MewoJCMSz9uXALStvv182kyj2izsTCH9EcQjJMhYHcXSRra6x6HNWtS -KSqajU19bnWJb1kduxLY0HycVXhema9nvUVt2exmyL0q+0loDqh7MCZqtI0oMD11 
-jlcYl/Krr2dSzun5rlNh1Q4ufCvFttwUiQvPakTqYvrGK6pU20LHTFm+AxrDZI2M -SIYT2vGLj1hZEU5pWpw+hiFFDsgWQ0ui/Gu9tNzzwmprtBsw//qmBTmVnXWCa1Sn -60bj9/8zGVTHN3iBFr190W5PsNh867kgX8D+sspSb//JCSCm9H87GsC94zjyGL2G -jlvM+Flmwm8DhWfa1tH8KEoKz9c1YPj0N07NNRy/XlyHP3t3srrdpiizTj3HCmxp -0mxyhaa8zsoMmWN+FabVGHeyv5j3faGPgmSpqc6q7hSl0CivAmUyPm51kfyjsAxZ -oDuP0ijIZtTUTQcW6V2yaMthc9lQbpwX/DfjK0VAwCeG2sQs8fPtUfFgbPa9NQAJ -lCB0r6s7B+ZEtlY7bkg4iOav82/RoXlPLDAKVeLRPYR2/v1hvLlFn5BEuJgnxBGB -b9yrIMb/qu4a45l3gXRfaPWSZqQo9/FKpD5DBWTVUsoayvMelA4KwYltFwinsD3b -SO0towg3JVQLMLCs3xKqVAj9A9Dfnvlia2draldJggb9gAQ+YA+2kxCYl9MeJWwL -sxSXdwO+3zfKnwaoGt1MadXUIldPS+ocWNoQq7Yk2aHZufxawpsF/5TfXAPb5J+r -5pgxLx83gqIFbrOlrJjrh7BRKEbEYaVUO6S0HdGAbkCQXGOi7XAP+EWze0Jht5As -sopeONoBxE5wk6pj/glcr46q0SJ72uqSsf/+nEvD2QmT6o+/fJKqrwSYo5X3K3pS -OCgeunKkqKvWWpXSH5pcy3cVVVjui5dgyBfYsDJtGvLnsk+gCudmkpJGuBiAr0Mp -amowujtzaYjYqsqjdAUj3L3ib55EUqHhCMo6JnykBJyiwXT4u5GSVVDWw7sb5cV6 -B9xUFXsrnT52+WdHaujJYNlFo+eV1dm0EGhWh65tVgwfKHl40nFjYY77Mk2/aTBi -RqAssGcn+ODSuDZuO4Le0HcH5VcdaLQt9Y6uV02fPs2D9Kjj5SszkCnjUAqYanio -L1naFHEer40zQ5hCyKio+Z1AdIWEYRxYHQKHy8ED2zTAjWjk2/eldUvCGC688fF8 -n0GmwJHZ34a5buSZ9Z0rYQnqdTxIsKCWvlmaonvC5QpiJveIhH/WKNQ2Hhjyt7I1 -VaQjnwKOC7qFjJyW+kRNF/gTjw+AiojWeaYsb2AVmKJJtepd1XvznRSdeuV0VAvF -oN365AHY2NYjosMBZIUbom1tma6HLb945PC1WE5SN2VJcp6kMBoRCMkparz8g0aP -Rgm64ecXkTnf4QuMIWsTB1PEeS1ZymfngxUPgDj7ltEpuJ1lU/kTxI6o78w+0JqC -ww/UnY+3c2ZjpFCKdIOB4b/SvpwVrO8vYh8i/75DS5J6Ouva/ea8HGx/I2dDDJbo -DqHDtA9ggJIBz3Z/T15ySBFVeosDWELVNwfF5hSI2J78b9j+4xDy3htkGtFBKhFg -mdJIz9x+N+1UdTjodc1o84fNi3BLGYgUQvK95UrEMeU+rDuDhoWH1kWQvjXqfcFO -DVNxTmtnKVG8/l/LDyarDLGmW/mBmf0pUYfC2+C3qX/5fxH2CLMhSGbrNsrfyugf -MhCwthOI8NX473fHvIc24WqdxK2yYl5NYfR0TGablw42JdUsnmf/30lOm9jIZfqU -EzA4kVni0RkfTzrttGnhcpbpud+a0JMeLT0eLi6hlL6CEO9c6xpvjp+nDj5kE9tU -Yc/Qyw5CvLhsuFWc3uRd2XBF3S0XYsPcQCRi+jyp7S444vr5aOOFwQ89QkT9Wxzw -AB9qH+oZ695AXhLnQV93v46LwxopmYJ2krF/YHqst9lT9DepvOa+Oh9CVpMmOwAv -u4XYrPSOnCq4pCOrd2ZfBofdpYl9jvVgEjXB+53TraThfNXTBjphv4Z9o9+hBVnH 
-CmBL43t0e6FKYZV2BbZUR0uBpTE2ri/Tw17ZXJnY6s0seDXKXPImiQiSoo1fQFEB -8VQMDSXFcMg9r0Ru8unF9C79gITfa7l17cIx1G2bYWCPEOggn5srcU8xB2tD6ywp -Cfcx1ztwbxMrpCaXbg4yTyd1rzha/LkcuoSU2Y/FbfjKgqafWTHsHAYyeazJTKC6 -JXl8mXT6jAO7+AXQ68dhyUjWs4pKMZ/rSUV+c3FHfDOzgDUHy3K9rDQUfILVv1n9 -HDNYoTgAjFA8OIF4OQIReHqudgGltj0M+V3EtU9yeaWbfiCRTcma08sAhCvTU7G+ -yR11hcgkOpgj0OupYQ34iRWabvDSXcqG1pzB0kz+MWJOSG69t8k1RjBdneTW9i6H -TkF7tMRil2Lftyx7ZckscC53ICCbrrNJkBzZw7SGnoDJNwKfXgnJ8l1gCJBNNncs -SWI6ke27Jr8EDk99vXBsZ1Vql9TWeFAz7PqlmUoWwcWQVtlUZO0kCCGXBFsz0oeY -m8kWpHNnh6GXG8+Q5vOwV+pLPbdaB+/qufiFLKdCDsUmErb9bz8bhVb0foStESFq -HpaH1B0y6fHbOwrTPt//4uaEIKQBvcKnl1EYHscPWHwZ5LB4QDIfaiCSP3GVAlcO -WPNMdCuucmcYv2vxKmgrGzrUnjYFc9pYuShCmZCjkb0VfDROjx1l/j4dDOsCGPpv -tUJx0HsFpIK+2dl3DCN0JNXBJ6PPreHqqXnq2kYkwYWZjPYnCnUKAq9A4eTuxmvz -bQWGLASSdXrNjNbTAQi8dQvPLDrSK+Ao3c3Ji7UT4sBH3CfawZhgM0HzNum0T3NB -5OHZ3YBvyYY8PNCrDipquhiiH2T21X26FKTGvh9lSBFvF2QOSgYHJ+uEo9X69BQ4 -jpqady1CIycSR6nd+ux2RJZVoQ7m/r8jQ+gfawd08zZdTcI1GMpoh2/gRwIJ6815 -GEduBBlu1gYLkM5XGmGvgCLnP0iNiFQ+UC9E3gnPpMx/NWV2eiVnI6M3DaRsNEt9 -rIhR3ll719lj+IdXybaVInxv6KwT7VWTVYmXf/DOoGplsC3sUQcQ9mxczGIEFKXj -eompVnbLjjLrB4XRRMQ/N+fCrdp+yplBE35RUUEQuYXPelAniO7zNRF6h988PK1L -QNJJxSzCveKxnYkUZbAAUJk0wXliCW38JlGvRsT/3LBZkd8BN8Cd2Hp457Gg1Xsp -YNBV6FLiSMJtxaJSojYlYkV+KvjwaAGMw80WQ7klAyA77DmBRlyDLorug64QVWDC -ooyJjkRcdZYe1Y+oUY1yREYWVA94F7zeoPCgWJlF1oZ5W6Ampc6gBx77yTZPz+WU -BX8GFYOP0347Dh2+OLEIALq2itQsw1/Wgb+tUa30xcUMjWdvadT2YYVGkTm5ffwS -bpYMTzhed+Slh4pJd+I86HBSWIqzi8qpN9G/G/X1iMA5ZLAFoK666lmInaFvsi1x -Bd0hoa0VXLsXKITJRXoKgom5E0A/0UXslreA4EdqAoS7ce4dcF0GbbmNC/abY4AZ -cMWde9XqUhW4qnl+A1CAUX11lwXiJjY/wfmosIneHGGIcKOYqdjVQuF/sZgsirM4 -HneU5zGVrxW9nlvceUJjEwJFvXVsIUBds9LVY/GjD5eBrd+waGO5KziAn5oAGbjv -cRVo/bqS72JSpx1vdVGDghoFJpLhfy62hInWE0ST1Ggmv4EYaxplp+KkT092F+BK -d2q4KMJ+dgIZ/zBCtB4OsXbpVLZLUO1cB4N6J4w+8gbKLG5RhUNIRycXJF3p0Mf0 -FjVmI1lCDi4m0I2+BfkUkrFgHoDo/DUsgSffzlvspq+keYLuzlUeZGbfU6QQuKFU -8yC7HdDEc1Y/TylbtqdCWslZisHGxsDaOWE/qubLA/Fb01V3OWk5f7ROfIenyfgF 
-HWA5wcRtUhhPyu1gENQDI1tXAN0quOgbrO0gjApPL42CR5VSUXmBmsNACtm8qkeV -Whvjem4ei8bbW+tUkNGidJMkArBBgISudFFLaIOymX+RV5JVQXSN+1O332VGT9RM -YPfRziBSfMgO+7OXEWN3t6p93KOZtZxoX098PYLFxNwSKAXMHU8Msscz1AkJbRTC -uZw9lAbWQd2CdJW8Us8gegujVftF2HCyj5XYeWzZvQUDklb0GhURzh/Thx9HbuWs -p14rat/v3NoHaE2WOhJkj4BuU3H4JizEMq1wMFksNKbtsQa9ms0UKH8jmMnZod0M -xTIOoelcOObeEoEztCX8lHVXYIkmaVPqOU8EPgfLk/O5HZKtDHuieR2K/2PDcLyK -uMM7hoPTZI8LTXw09bhML9dgJJY0xKycJbWcBsI29VDvwaEk7pztk1RcVFU4m3nz -SWnhinsCRkLZyy3FXWtNRaLUAJWHfCZdjrTM1OuJio1Jk1jkm/aGAESOt9F7++Qv -316WOZZ1cdLq5HqNlnmvZswM5xyM3tDt0Wjnjx93G4m8aDtG1f8+5+Q8tzeegmAz -9ksO7mKenJtX+9vCIkyit/6SWa+EZmwNVfpY/4n/xexsN74bSuZIwDEDYfTcCqbd -1iTiAMX1Kt02XhrLCW+MoZWWy2FXyTrR6JEp4pwvwzeDBNZpZrvM0Lti0M+cpVeF -7jK1EAUXskVL+wGTsc19O9Yg0VVK+o1h7GVRpogfHcPHPa+3558U7yb1S87p3Huy -0g8FX0O7AlUN8AQ4slZm+eXPOUmW7DAZ09RZwrUKih6tozpI+i8cImPo1WTQxmPX -WbLu1SbX4cX7FRLWci2puYaBsZifaasY0J6K6rkqzbMc1onB81QLH8T32VrdEdrT -a0lQXPbGMI3MLIAMxAyLHmkP/el60ZAQyqOHK89D3+fBdJL49cuPq66qdRmYkdqK -wJBqtJBjFMAh+WNMMzezV+fCI3+fSpZgsBlLqdUq/COz/8PdHpFFimkcjM+V5nE4 -BR/t3eioEfU1XjUZua2xrVKw3B4q7UpnKvbFQVKnzKOT9mEta01I0HjhLAApuAPP -G8ytLhf/SEwm0hrxP1XFN2+e2WXSOvXmnvVAjIaOxpw88yZAbKRhq4Vhdx179G01 -aXec3xWjrVzVqwKGbqCsyOukqmj/K2zu8R/eDeRDauCv/6J/JzNoFlBOp953va9B -PoAADmGLkUQJlWWtj4KKJTZxQIMhnsFs2FuZCY+7WGuZAVWxkIc0P3+SBlwIOKBI -Ob6KxQAI6K2NgDoLnZhJ5DsD5rDM15u7C3uWm7igjUVf1IOkCT5m9GZY3ZLpbLbB -3YMkOvCAR56FntmL8BtnRUUJ9cDZSvtaJhjq9UX5KCf5S3zN0+GhzLRRRuaTWT0Y -zF8DX8VFU8RybIssW2gM5DWz+k8UyhFmfpXObwxjNEm0ssoj3IHG4J1j2ssnPiso -WFO2W/U/dC4oc85mo4mvEcbAV2QxlYsSLpIdYzRfiQQGt7BmFOaE0saIqIxSD3lJ -6FHJsK2PA33uxRRzwgP13IAziOLdE6jAf2RGzjg7SrGc/kId90Kfn3C+oDAqOAkB -SikAA6SudIwEgwQiIA6XEgFEZNf6yFj7MbABgpD6pzJZiRx0b2AL1UzJQMnV6qPU -Y2WduJZIKmnwMYkEhVEVjUVQPbvdfoepHVoW1U+MrrgxyCFmQpT+GBDOJ5S4rzQ9 -kL6h9NJBWs+IRFbQDhWj3fXwhgBDxJggmL2tHerhEl2i+MVpEmgsLkj0ODGwxz43 -uBI9mrGbSf38rKIXQgInaqe3qQRV+gydpoatuOH6JOREURHUJSv7vt+glm7vUTOX -55JxxUorIEqDPD/AQeUxFNp68V6eoY3yirVMwOVG8VGW01t5KpFZulnlNzKKjGJY 
-v7Mj+Du3gheEpM8/cdEGML0SB7yhFiQfEXWWZNuyDaqLJTVqgX0bm3LB+Ir0rGxG -YTwtiOAZRCrBdXWTllTEf0XOiWLEIwbXSOicLvKRXNVE1NBLSjTVZBjN6IaXyIKb -vYB+NaAJEPGs4BqzpVrQFtQpoYho21k2s0WuJGtHo9leofy1LMwtkDjbtifIWXE3 -devuZ1CtpXKfit1lrX3g8dy0lxKJHJyBhMA9yS6aN++kecb8FdgXQIaWJsxP3ZsN -VXs7fc6w3nLIkYozGHpCB6GELolyQMfDDSt/yDSTcD2oRwtiR9MfjX902MrgVIdk -lVdcm7VvPqX5/Mvmouh/KEu9oAbZDxNbkCOJs487qJQ6p8ZzXx0vh7Co8eu3XAy4 -gFf2L4DI7O0q3iy1ObWZB+3vVvDzUZ8Jx8H0pRRHKSf+xito9XMuN2DpDrCzugEn -9x58sVbwnfdp+m2KjA24iLYLOQ7usb37jaju22Fr/nunb3wEFfBmsJ/cJzk0b6nD -Byilc46rD435al0fUAPeOZ5RqNzAefJth6jiFiPe09hi1bwUJrMNI6yekvsMQI2C -+VtW4n96Q1DVMCX91A9IYuXYpGCOBp+FACXMO8nEmCnGcm3z0HPJ7hIFV04/dBnP -RvCqNZGxjyT2o60Hq/rx0Vjdd9YRXEuT3ETlqIGgGag0wx7xWxPsCWEPxc0POZEw -I1Jq31EBXjzqRMsNoqxYkiu5yyLiFFrJ07l43qWDnHVEQiXSXlF1carexMrKvrr5 -HkCve9glK5Dbyd4RljTxinMIkXhC/IiU4g3SKOqDjYP/E0GOCqRx67S8bZ/iZ+Pr -yRaaPTVrcb72Wf8PxWDNIxl9DvM55vU0mL+/GJKu4+xZga8Gyr381fQ6nrqoRA4m -Ke9KE6bK+2N6RclMenfPHWeunGzHTVqYK8G+nXxPqEjDuKFMZmLdH8VSvb5ZrWnw -+Jxx726BOegl0F4E/9/qY0rqZQm2cMaBdrkkaIWjVe5BdZJfKyEk3thDkt8Dpzz7 -mWX/j+8KtIy61yAhevg7EMQ5LKqpUgbIqumtiyWYqkzm2gGiSM932velx1etu1C9 -6w0LzBibNuwNljpK8Jw1GWHTlAGm3hk32Zvpn4/wYEj8CERiNWV61aqZWZvUk8e+ -/oXOoL6c07EokMaBuCWuxo+/tMTEwP2P7Pj95SAN68lvu5kxkngVI6Pc8nlLT0Ld -X/dGtqpLzRvnd+yKP+XZmjNcKqzIMEN223h9HP3obrTSKGeHrsnNJCk4jwmfL0oE -LMK8Y16UN6wZmA5be6yA+aDmYV9RC5VxkgmpqJOz4mgxWwMCbZ3aKMf6+XRZgxS0 -IDmhNL930KuaNbdH1QSmc7qNNWMUz5Gvg2FKnsdBqw7RGxdtzBonQS26kxEmzNfJ -ghrEhC/plvsU/94zygto8whgSKoiQM3IS7U1FN5PPUjOsYqlpQxKmRtmgTajL+2s -MmhB0g/glCqxeK7g4OZcevyiUZ6JijCP48KRvQOnF12TE2aR1k8yeJyqk5nYixpu -9ff+cagN6bungJXymZK7kzccAF/hE/haV/dekliZjS/BBMSVdjapjGbQoQnn9LyI -30kXV5FCuiB4xwr5JG/ZAg+NaE0Te5zXbvzeKSsw4VAvuW8GIzmKGCtIyfGMuzCZ -MTxB7a99BsD2dJ5wbSS6j8JyYf24Jr7GzFoFadSlVvQIW/UR08/iekanboMu5lk+ -o/b+Pad8JCbXzHnItMbq/m8HI0ev9ZKE6EuZ/0Xne6tf6VtwZdpVdExTkFkVx41F -nWPCg6axY7ukmcPihvtZ8sDXAfL/1saVMHfaR0gfckFqqClLNruDn4b5hTRgfwgI -CT5phFswKva9uEHXJTWrXbjeujcFbgRTW7/qQkdmkODZr4E+UzJVxupax7CCurvP 
-4KLBUdOJeFWXtnKSlEhpEnK1zwTiS44yex7teLtOP04Yn2PlkeGw0F9MpovRjd16 -di4odTWjE2ht4QhjKyuOWQ0Kn9qRNQIQG6xAMLuSpuD6HilZ3k5ReoX1s3p0uPCT -6rc+yQNNKVLIfgwbeJuGBM3M45GJgdqOCOEQCAAyW67cWK9F7hPDzhbJxYshAuU2 -aFW3ZiWDV1qgc76k2RcQnjN1N/2hax8ZUOJ7IIQCHp2goyGYRWuaQcSg+wMvJdmP -r6+2gsB/0thTP3zqDBVXlx2xQoPxUAliUwZVH4Qv9VPHe98XTRhvuZ9lRflEn6C6 -iDG/Y4jTI7df5IM4hJkp60+S9S4HhVhIZk/vxdN8HHZ0/Skoz3/yMLMM+8UHQQ7V -uCOfNlnF0HMm1nL0sXBnmlgcPpYZ6gKV88pvS02J1nAYAC6KJuhSJudQPcQvUgvh -PmFiFnx0CuaK1EUhOhpuESAk99Yxeik+iPJlEdKv7AHMFGsF/vmmpd5C8uVZuitz -LVpXpo0Fr6LiLfYjV/Hmv8EEsBORBFIEpHw4DR0lAuqspbsTR42w1mYx61G9RKC7 -ANQPA9MB2C/4ZP0qmQBHCf5mD/9LHhicrDatmUaIu/a5/aMOjT4kkZE4NFPKqJ+9 -KWFYyRGFlDOtE4PCA1ydtCaIP4SAKdnZIW59f9MPyXS2XxEv+DkXBrz3P6HNuSEl -ZY7Mrn84t4sovlKgWNlWoKr+B4iQ/aZWkICuK6tLaBE0A0n2SNtL0drb3tIlzNlu -t9oi1lp+U6aha0gbi3HjHArCJGa5SBxIGIjRd2RiKh1vRMJcX2UgUdYLa5w7GSa+ -3nvYjZwb4bb0NeGV4xDtmh5u4FpX7d746+9B+ZIJdgxGOo4MESGUjfnHP5UQpcOo -id5Kv3VR8d1w5tOJQ+6S0z88eMMkJbekhaR0ITqxiD2LUSpMoJS6UjVppz7y24U6 -u0krSJgIpVjQdB65lJ9xxkrW/w11m4w1ngK5nETSSKXiw8jlrLHbm9N6D2+zVx7d -VGIwl+npyIZi3gXklpp/NMIZJjDBkWRxyRo4kSbvc8HHq8m1zHdjUGs2Ngt71De4 -oAyN8Aibx5MAFRpHQNLZTJDLRB4Ro0QiMXEFniXxO9Fl26KjZHyigzoyH8hfCQms -LRCFVarkPAhK7aGyvoqOnExIIqwROa82DGdWzyUbenwRVJkmFFJSe3NYO7KboGNr -iXJs7EgnIp00nWz8Pl0jOBGdFP8e9Pjt2Cjxphg2WbWLxwC11eea1nimiqTIDUA1 -35lzIFI+qvdQ7R0cGPJDflLeDuXBNuqmKLpr9RFXeFpYDkGwB+RIB7tOUzDskKqb -iCCJdagossRAHsfpOFU5kcmPXgi2R3IxkKT7D+nE1wVHbafuIOiHuF9MhihPPf/w -E0jSzpzsCh70Z76sZeOXwKMIYeqB+Zr56dawA+widA14x+CQ/SHfiWMtlJy7yUbq -du9uezQwO6Gg8H880OLAqYbLjSDRyL5YyWM1QmQ9btprxBWkmZK2Vyo+kot7+d8B -T3jeyeFLkYeZ/W0cwmf3WEf56NFyEKLG2p4Zw+uqLL1K9LcJycSXU/hDB02shh+c -w4H1rJwZL+CYka0ojJIWimvlHsT+EnUE+y6nwEAgXDcrH3cxHQzIBTBk4zKcMK8F -hZtRLI1l0M69dJGxmjSRR0N4n4lTGsHiESIXM+AjjmPOca/UP2OUx6XBE3Bq+xBu -UUpqax0ea1lzzoht6FNdbETzWtPaMd9FeSVwipuLuZJHPb72KBVThraCMLyZ3zGq -piakpfy1qRzvpDJJj1OOzp4p2FlIRefl3oisxFCYymunHWndTQADi+1cbMYo8d2H -CvsRGsPNl+CNRM3Bv5ZakhJb0edS6g7VeLe+Ow/dMsAYs5MC3+6WcRDcaDMo7rQr 
-yJkSzpqByFm36l++RlDtOfbdbGHY4L0uShmEZ27awpeB8uufgqIPqXEaLQVAJYQN -or9aIEA1d50m/MXlCLsdUnpDGCoVhUTGPDf4VAAlkdkaWq6pslIpzQVyDyA/qvT7 -oo5MHzK/a7I7pprryPjF/CcUUR+BivIEn6viUzhKfBUjbY02AZxFp6hJaXrJ7Omh -J2Clxbqa5U0ZduBV7XeJTeacWAZ+GuA2su+9fVXG+vo1WDuIbOcLTj7cFlfwh8Ed -HC6RoJ6TbRlH2achWLWOny9xGXsYzticMLR9EW4lAatXiggxmAA3o+LFmnOR3vR7 -U4MoGYmF5JazD9EQwU/rqOXaDawYY/uJORQ4b0RjVysYUQTGTVePG90mz++KrC99 -L5zARP+hJX3EwWO5Uam9cAFk4TQWnF3+2c8IIdmE4bx7v8PR+iD17KPvizzre8W/ -NbTbPm44fQtRjWIaes5wAthCpvN5XO8rEzWk+9/O67nt0dne2LtzwOboJBTakGHk -2Hq49Wbd673gj5F34RhiwfujFAShyYhAzJyyiwStnOR+sa4yyP7hPDg+2KQAH6rX -ShXsHFz1v9Ng3SgSi5sS+pE/KqzFNn7M4Pd9UnNxByR+uIGHbnd+AfIgOlhKBBoc -DSodFPx8nz+VGLNduY/dnR8G21xbuULrq9aIPbsGis1PoNJcA3cg3AMrIXhyxi3T -RS+azmw0dltctVEMDpg7pnkMCNS64y15evpjJgdjTj5QChU/Liwbix1iY6phnMFe -mf4b6TbKajmPoKZnC9eZc1Ik+wa2lLx8wlhfzjNMNhqHJpM3pQH/EG4znLKSXhOG -e55sdzwPxLSutKCj6SL66578mgotLieN717LmvPZuYJmSmENECfqgv9uktaxyMma -uLvpmQrALvM2hdt1jh7vxDHBpJzJXRIPkWOchE9DttfiG0n7tD6xAGWM6eCQ906w -cHFlprLy/xNtkepQFByTHrmDUCvBMcDy8Rbu9G5/NqtXbalXmCHtEEPKlPx1Vs0a -Aw8dmY1HNLfkOS/Vwkzrf2WQAtwvbkMxa/Ja3AF5sS83wYOjQYL04YYz9z1d+GRN -Kmydy0cuLuoa3+u2sjh/WxORG2ZD46wX1QTLJCom61aKB8gbvYFAKT63O82G0Yx9 -5Gm0SQb8Q3DLRpuvmah4UeLZNXy3LuNW2KTuKm9pO/a+ogHglW1q0jWW9id3X0vQ -f2uFkyD0LD2Y+st0YPaXFaBVSshVJAdpHVVAnXioEKM4hc9RAJ+8I0HxXP4Bsdit -gh3g7QaE9oRhluG0sA2ryQj5Mct0639KrrbEmic9uzVSif+vEkLTcjwKrrN6wH1I -oUbRCy06vj65u92OiU33OJxFpQNnFKAT9CYi1P/AGX8urnItEPH6PDOpqkPbVOj8 -sGFXyBk2gbVc4hGiRnh708ABSp6djx6WvHJiuc0n5cPmcRObCswx4IK6H0aVN/lc -fc/Nn/DELgii5KZ5JtLjIY7+kMZEq0Ga30l5W+RrNHhw+wrRYafeabdHSjL/3DXt -wEN51ZgFRoSw2OlLVz1iXcWPKLc0tqs5ln5ucYTj1hmff1tqGF691Mbg2DgDtKqr -tgWd3TgP9eF6HUL2Ly0BNMOhrqmVffFwUcZb5aU0jlafUCec6wQmQXsMj28gRAv0 -1N1zllXI+/InxaS6p2ZqGS7HhpWguJ8maXZpNjdwOWe+4tKT7bHsNgi90Fz8IJJY -Pv5ELHGgiHdrh0NOwKCiEa7ycQ2gSuvrFB2zPymznDOW38h+kEz7Lkt4N1NMWIBR -+Lf2GMxS2Wqr2Swoc+TtyNgDNgNQvOT/lQg6/f21+8BxU3P2duyKQ5IPsG5hkaDr -T6F3+baDjXVgfiAc6Z3pJyKG3hNzO97izTMbaY2dFZlNTDAIH7NG0gINZzEc6VNE 
-3ccqMDRvXkHVJwUschMhvW/GHuZf0ry9hcOpsHBd+sLkbY5ya1JV6AJ59wX/86P/ -GcXUXXBbJPxnN4wjTSvX94ApfjSJmMfPAkeLNktrpqvf49kSjmSmSzVeGHvO9yjI -k9dT6eu6pioHz+S5D5PI1zG1VhmMAvnOyefMPEFlkr1u9l7c++y9lQ4Jixsek1bC -kQoz6ikEN/tYsZe1GT3eTrZKXgpSX99uRFrdwIcsCRC9IMEfRo8y/Zy/DkGRiYkB -B5mgFgvfmWqjUVRI/vNCIhgabfx7yvMgcCc6U1lxwLcsZZKkj7zaajGukHhIbKy+ -EZAKJwTcLE/jkvuuyvGbGvuvqEI3+P7wXudFgMLc8vI6u6Lg07xW0Yyk3LKSglYn -N8D9gzVoOqglGHBqypgiNOtns+xPsm4p3xin5UFmysb3cIkHYcoDiP4Rj5S+rrfn -7Mq25ljKn93xKtQJzOLSF0dwWN+7NGAKP7yFBfyTXVp6uzJNcjHUfhcarfS/rvmT -pwEEzM7S5+Sv8+veOgudcSvF917baai/3niqqSc6tHEQm1nQNg5Mw9Iy2qIxiPkB -RKwCTKHBd9VG+H8ezqVjV2hEn9qo0u4ll/MT1wAoB4YIdWQou4VSAwrx81rf7QK9 -uppxed5GxyaLjnfOpxan1623gpyk+HFw8ezF2FpVXLU0hb3FBlUjTAbEBiM/yEfk -mJa1rZL7VBA4v5e1uUwO77P6ibaFMkd3wgfJrq93jTP1QEtXBnMpuGrQXSEmEy9Z -rjX7zGPrC32wYCTTmbfAwEetd2XtcrgupfbWm7fRR0ZXKKh7DK1Q4HpYZyd8f8ev -I29GLssQjiyTlj8q2Z4/MbvOq/nlgda0w3rrbHikxUlY63eVuuG2wKuRo1JVF+BI -BDwhlk6awAvK+OyEUyw7UTlDu10IEzaaN3oB6MbDnSD72e6Gd9vv4kja31KLm4hC -JckyLk1ZEsnzlP3GE01PtxMdjt/HXUGRLwEwaLxL550d252G/hzkObnrg+VhnV7O -qz1d5lU/pnDqBernB9Kwbzi1Tj2zXkort2i1q1t6Lf3ZX5M8htM1osAzbOIllMYa -/1LCED8yM2ZwOQYsY1R5fAzn0vk06c+FEvwxbBOywjpJaQSys2b9sxYMUEPi+xmX -xlDPPI3cDCeBh4GrDORPQwzlecd0pEG4+drBJsm2/sQfpz1xI/HekRpmidxCmrpc -9Zo7nNV6taI5Ob35P+Yez1rMw5lHcm0AZJRD3bk95ApLX0g3/n9FzVNWa9x66uGq -vQP9YH3kpvOEOpsKRCxIngaBUvH545XiQW5TAGTS5RnEdEL5jktVI25v635fA+Ae -Jy571OspVpxiJFiV2kux+b2aCrGQFMpGRDHY9Jrf7eNz3NMtzAtkwjUI0VYl1zlV -lDjdScpuYM8l94m9sUzipjzUktVQzqhO9Thjp5UQUDe30bTkH3YZ4A== +U2FsdGVkX1+EjJmnWmQEpgyUNArAe8yFuGLfSiIhYAY/a9bxa+ItpMqm5UnIsAIE +np5moXmd3pj9MXZTVJLJSSBQ/QuePjdEfL+HNZWX0Waw8GXdmIWkBkhdBLeRdXqy +0XHRXTptcXhFXx9fOenjgSxm8oblTy3P85EsbVcUrCP9DWEyvAnrFpLSFFcz2OB7 +6/wn5EgMHGhyjaTjcyfkNU+Ae7rNTCAj6Hcl049UzAFb3hGqaz8F/g0wp/mX/ThI +2qDZCsLlREPZsgfqQkW1zXPQxS08eRmUR9FsDnDH1X0dbp/9eSHu5IT3MrwawEz5 +hS/K3XkuHFpWQWWf3JBPXYl0A/m2WCM39bmg13mfnDBJsxmnEY8510zdNTHTqSUG +n9L/ffp2CpCjEDbKQJuzrfuzQ/h5I37XqKvE6OWdY/T48mDsqajGepPBmZF1WrlQ 
+y7ZMWIlhzxw8P2ZX1nWwchbTxy9kjKs71tk+ikKUoEU56SY+WmVjKvXZtUB3sY/Q +2Nho49hpGJuPV0tE6x8oAMW3ERZI0BYWMrLOf4bm76LGgs85WrEzz7dnaPkCaEbH +SGPx/U93f7zV0X05sdOPPRoZ4mxLxNOtgA8qAQLeAlFnzbLRlq0q4M2MQ4YZ/9HY +HTE+CW8sj35e6TRxPqfF38yisVv9JmQEhZ83qfMqrTaR4PqLWhCm/Hak3LMUNRTw +UrdGDmvhCk/BHqj/kmRyCY5Ts9Z3gktUD9OSuxBVnPxRo+exabbjGqMK0Eslwzgc +nvgba6ReKkh1xf/3PgI3A1ZOKJzkE3YctTcHV0+o26I7JW4kesTdXYIM30RgVF7X +N/akJFV42ZT8xEmYA0k/v3tM/xpQVzj8FbzAjP/d8L6etIolTJPWS/+rkgTaiZdD +EWN7kLfgzmC/OClSjuGRyLNbRed7rGuDLmKZKY+6Bd7tcLgBWHKJQKnZlJcoSx0C +MbTS29iZHmEkV1WWeTm/mrjqZsQ8/5ky4Ug73RePk+rlFovxCLSypFOxJRHR+nte +nYE7iJmTVwE008IoKTWbwxZaqbROSqKL9sw0r5Kps1Eve7oGSoNdzzCTArlteN5e +yU7pz1lZrOWGlT4pVnYnxuY7ayGkLvxqNWBG+bq6VYsh4W9oMHyW4hmRRTcvfB9B +/1B57RUM6rCxAWZYd+/T2TU0Me19NKxhwHct0zrllNwypWUCZQ4kfvQvVu5Xo2AJ +LawsQ1KUcFfVLUfq9ecQU8OSU4MYYIibPrjUqmQF7Sk22HM9cXghAWY0nPfiHTVd +jdWn1c9z8lVNk/jZVxN6H/MS8mC4BRw0tCJDWKWEkhxX3V8AvRdTOIsKYp+i2U5j +CIcVGMfAtmqg1WpJfVYLZrYI2uskjL6uNmy7XmFxdztbiZyuE37NEd7cw0q8n8d/ +mB+BBQ4x6Jd5qvlWNceaRv5QxNZOs36CxM/ta4bGBaAa47CuJ1EMo4EyoirT2Hh7 +tulT758q0HXYsKcg6EImwAYJvEws7MDGFdpWyWz2c9gQX6biNBRXfaGzoXE+hQ8A +e+qPaxO8s5yCGqiKu47KreUotbP0YePyEE+RlRIJGt8t9JGtMVz6k8MzcEVRhtWK +iX1mwjUR2saocyXATYggy7Xmooy8nLjvKqkYx5FKmL/csuwC1gdlDrLRl0MPIhtP +z5o3SSLDud0TzQdM0vnbaV33ynMbon1MRW1CL5OKMHFMh4Zoilm1ti26JiQZQGNS +1899V1IAjsVUgLtCctP4WiIlFw3BCwYjXHnD2r7gCJc+jSkjwrM+ayhZ9VIGFC9K +J7+Ynx9eon/QqvMT5Ek0pJnVsmjJtWLsmOEWmW57VFWKCgW2wW8s9+/YLdUnRCvd +B54x2MNHcGLWx09C/Or2Ir7U1Gcz0LXkkzbDifabUjaH3LjIfYafnyYcu1BIV9R9 +ucP9f4Kow/oWZHi5t0SFNO8ArqLOumTHxFpd0MtsV64xJf8TuZJkSEZbxrb7VoGM +p0zVVgZTp2Ryf8gN3kpTPQE8CCZInpUgipza59z5rtgpGR2X6y1Qswb7vMez9NrG +ihDA3iQX6v/osxNTVfBsTfEjE5tj1vWlm5JxteGZ44RiRmMk5GuvfO3atccFEmxf +NKtJy14jwmy0IepxSMrs9sa8/cLQGH6OBOPUqmE8OpnVtlu8BUt7y9BrUMtSU7g1 +ej3drUGzyuAMSXDPhRuJjL2KlR7/YPKvLpBvhGa+vn/VkFcaeZmx96Aq0dOA2dQD +v/y5xiW13f1s6/MNfm1pt4vGMoijlYHPPH6e4sFRGhqcgsHkJuJuF0HKCunXbrEU +eSgpZ+zzAEt0C/Bk9iaKYvqG9m8YDL+vG+hZETHucvmGATURoyB9DeyLAgLWSIGb 
+LCiL893yhbgVc4j7tns98X146xX3naHZcCUKzN7cFAn0Kzdxv4T0TMw10wTuQj7L +pzykhyrlSCatGK9ydrZWP2rg8xIiAxtvCPvhZf3Douk4KuF7VTAwOSM3wlgAnuZG +KqhmaFb2cl/UynMiREMYkAgnSo4oraLSIDT2tKkCm/Kv+Fx4HsUCAH205l5ew+/J +9ftje6gE2evhaycR7sbOWul2N3gWHPxNjq21SjZ61uTVfrdu1Ls0vPQ5VlEapFmL +UeHCCiRL6wDZx572zukaueonghq8PUO4GoPWzY3j8kX3PaObzNxyfC+Nt8n2yklJ +vyuWzEMTym5d2knQwt/FSsyz+KCHEfRwqd6Nj0q0KTyfwU2be92Zf8WPDhn6Gv+e +dNjGLdgjX/bvhsEJb1SarSPM6Va8x85qja5PUxcsWYyEhX2VOxWNnG/9HMwhPDDg +oHiBRUKTx2GQ0MShB+M/yG7yWu9JRcBSj8jk6+z5TfdFXfed1+3+h/4v5FNlvBQw +E5WRadVZRa9FjgpjZ/ABDZbTUTIM05akVyegoRxViX6Q2u93u3HpjA2BYM633uJs +l85czl4l6t19Qaes/wyStS5KXMYR4QbmHAmn95zz5C6UMYa/F1SXsbthooEuYMqp +WFBhceMu1d31POJImVbfAY7GQOjsy9Iv4ftbiG0SI+PvByP1/HOwlkyJi1nWRPTv +7yOclBXc3WRx/zvejsQule/Q2pgpeuvB/vw3LVR6GwZfsPzSWoyHurmIJd/+Mpe+ +NyCaUrTsB7IRwnwCX2eKwb9qzyLPZIW/iveJPL/2YrxK1IpHSvbq7fSMGmgWH4V+ +mYIEIiDavMCuZk+yS6WgEF7DtYT8ZAn8UCNmLvDtlMXMVXKHIF7rflSiv3zm1nul +o0giVpplQljtFdl0oOw7iqhpNzSNvTSzHGo/wxMT4ECMuWJ3/cYttFmaf0gY83W8 +tEKl7JG33jxf1MRQwLe99Tn2XTnZC6Mlc5uATQFZu6hwekhjzgO5HBkaK7yaGO2U +xS8m8QT/MWK2Wkqz9WFXjlerLAHR/p7GiWSuRdB5PaBYut1gZwAJvuE+X935qJ07 +nP6jAwuITvxBC3rUKBRcksthAVbVtfQgBw9OfgzUwdEuGf1a0GhrqjRVyC+bwLwa +SVAzu5nlMK0raxxoxTKJ41mWe0wFft8voz4XXCIg+Z9rnNLhxeeIyVCIFFbbDvjP +taYLmadGlxc7Q3aGphWwlfMqVmkyz+8OYzBpXxF3DH6cUFCuqQopiA+7YaJ0GJ7U +AuUYrFWTIAUXcoXpZaZfpirqpoCx7GTkKr8uxFer2KnJcgBtZaz4nIGypVyxYZw/ +x3yMHoCP3MgYJLeMklb0NQYpFhm+yl8T2emxXddDdDqFWKQJXrk2V2XTCsCLPNnQ +qs98SR/jnirtGctuNT/gHxBQjzQfBZNCMEfHLSbDjGt6Jd/KvwsfDC6Pc7a+RrRN +TBObD2KPs0i4mTn9dYjeesRtC9GLucGZ/uT8CsybvXRY2kZi4GV6uHJuQoCjZ5+b +/9+TMIXoWSw9TqA4kesARL/ieUYjUAvTGFsPnNmvG414exjPMFd1bOO2PZM5PMHb +3bj9csVHmF/+MvbUEzJSIl30f+G/4T/8pd/dvQBWbr5rIEys4mLGc6bQTKDp0kwo +9SXblV71Ff97UNpe0G9/m/RhL9PxwTnSOg9aFhUHT/6oWBNeg0TqZdzHcC+nZQ1P +Jjuw28ym8PFigJ0jIwEDoErN3ycIMbvksw7knPbNnGLzsKRaJlawVhijxE5gEpek +fUyUdqo8VCvKkBv3zZmr/QcUO9wF4Ru5ZpHIL3e7K61bgB436hmWMUkLfAiNAxrg +VW0i5HCo6oIrVTyEp7SoWfM9pKyE45EIcwHWlxfyQumJwP8hXPB335yqfyEWAZ6p 
+9VScSwvvLbe4ruRmjhTsB0Y3i7P6RmDf+0iVnt1g+BG/PAEeR3c9/Zq0G+CXf2he +EFBHPAA9Q2MLp6cF+FNorJ7uLFqCFK/S+IyvPbcf6RFJyIzG+IJf1V4tvmzbh3Bq +O2qhmfaCSDrtPefgIC/nJxgBzDPfJKBKLsuIBQFeapXS3LeZmgZ5btGPy6xasdic +KScJQdPQpF1/bMYmFkf2gdjBxVcIuJE0YgIH0w1snhR9yWRaSGxUafQisOvdMSFR +rWC+9IYHkxNDfhdX+HO9ZMpb6IrwBnw4vv/bHanpp5KIo96pcX85gxwQ7tEP7GZE +4gm3/8fFigthmOM3yLyUdaqKCgjnDqdVVqVldqnNRp81KsXhE+pMdZQCrTU0F6gP +89bYgTA1yQfoM7TEGUz78zA2JmMo0dOIu2dwu3tz24/l5tDxkUVOF0Tx99RaMCix +kvQKYfMFaaBqeMxJViJkfjPxHelqDqk3WAqXhC5t9m56aBqWbasd5rO+7BdsoraE +00m7R3WpMSFSBEEeYTYMmEqyEORVVEqRa376AMWLaFtKtWLu42XFQvJjYk6vbCoX +ZTvev9C/RKwQkrK+JidNJA20haG403OOACxDiDHn49E09KgrMIl7B62hdT13OCun +1mMe7QzPpN2VdzglWGQsz2cyMMBywCiD+A+bPku2B3w0pGGHjKS4IhKiOLkNeR2u +P1qQ2d5U3RRk5BmLevQVGgeV2bHTZP7lDbO7GqWL0uvgWijayoqW/RYxiniXpCNJ +ZK6QwWKyFyr+yaipB8wRm9gmpJdawo4QHl/0T+05jgTHdg2O7t22OVWNNYF4fzms +TqPYYV7KEp3sGk0jrhtzGwz0FE57NLZwFyz0E94afX0ZmnjBLWoFzX5R+YddoOVN +pikf5RHi7iAO8fVurVkoiH0G6osukNqS7myeylU2VjqmmB8u/b09Up3faAHPjLEH +yZewcVoAc+b9x9xQXU7Vk4RO54+HsyooRZAtp7ZXIyPrNpaGpdJV8p4oBtCuM4SR +2BPncWvmmwqae/NFMQnaLsZcDzmgUoyObzwf/fiiXdCYMvJFyFO0wt8yspUCgyPi +6+mkVOhH2mSZc8PFZKMb8nPEfpLsXN/1k9ePdXx3i43tTVeXnyAfdNU4hIjKFfgD +w4aXAvIPe3oHr+gYgwLFgJqTAlr8Egb4Ae+OmJgSI5DCepA0pPI1fmoN+aWT4lWT +q7xUGgQBzw6xU4rIptQfrpNXU1TpkUqsNYlLL1Uu4o4E8d8rdToziVAGZGgusPcT +LgbgMY6TcZMv01BLzq9L/u5acKQKf+AOMQveVupBsSlpVQda/ufj59Up7tPXq9J4 +w8U7qmsHSjv+DczwgzbYn4oX9au2aC5VtglZdNIoe2DT3QMpolblU+V9rMLOnC+F +19EDLn7U+nq0ZpxeSzH0LXDcslN7e/pGwcPJymdk7KWCXf3R4mtf7+IKup2xkO3c +PmOZBYGmESiiupBGYeB5TjmPBRyW/ESaMO+/kwBqV0v5lKG33ATCSDcZ0gq8DOTI +hJza2b4tF2V1qbbAn7FbUHlgxPKvcITsH7yOA9JtdBTSKpixTR6OyV6UNq4xWuyZ +MgzQUHH2ydLrWSiwc5K6KKclsBznaRcMv8Tjj6Ph3soWGRO9dtiV4Qp52OP1HMPA +lX/yybLm4wCjXUetaRkH+bg0+Cfh6NLylXh3vgs/0BSXAVX/zJriYOhpVFsS4nAX +5GVoM/n/C1oEfw/zF7KA2lTDuQHHXJ46PpOxBNfdC2Bx6mcLpconSrHmwRpfdaGT +dd3aeyDZr7wFgsB/gFcqLuOG8ZFW6m0rmMwNQXa6nHImFgereYE0Qt2uSSTWgFH6 +T7NaCsTfUlUvrXxLoWjx7V4yotbnxCP9g0HClewDeXKE7EBYjI5C4QCPvkVfS+2K 
+GghKjCROpHGnIrl2sghAtI2twcgKA5+wj1Zz9H1O8t75sPYneixHOZCPwNspO6QJ +GSzVCKM/90Qf4io6l0l919/40Y7RxlDJP2c/HaIYXq9aXb/DTOA4EM7nTFMNuPjC +Gl636cfvYEyV1NPEpjWP80398wAsBWePWAJAwlkedw1+Da5ETnmVxG6oIFIS+O78 +2grviqXNS2nzsY2hPU/6Sbe/FGIP5z++6f5dWFNdS9hRUgw57nwKMNp9emhPMeR3 +mFQEqfGNJ6GG4mttCpwXqxKOiNV5PGEHG6X8fQbkBVTApNar0lvTUVza046B7Prr +1agCfgvdSU7ScInXy/ptBEUzmcurc/Qpq3APhyU+4ekCzbkHOz8p8CaB18xHatV2 +Z7mmYp7m4DAHQjCB4Ax85wI5IzB3bzfgw/uDkSgkyTcfAEdT5WLplfBlqf+SDg68 +eWE1Qe/Oa34gwHAdcCsKpg214I7vmTaUbjmWtgqhfTL/98b/trF9bVoZONjld+rT +FRIAIU0JBGBRWbadlAmTnt/vZLGGlopKehw4MzVIrv7HKDxF9dy27nXtGwWFHnd8 +DgFEN07e5U1yjd5OJ3hs+RLjozjO+3cZGxn+rodXktcOXjduA1nAaSZrFPlwjURM +EwnEHWCNlEw1NxlGmpwmuJetcbyqkCPia6Adx5yhlvFE6J+a6n1jRdrbABDfMJ9K +8sCCLRP5sdmrPfc/EoHDocjgyN+khJzfcGQxe0gTzploiMnZi43nOZISh0LG69zC +ApBdQo5ycR+kHM6YMpR46EUl6MbDF5h50leSatflVOsuauLzCVtPHN/1CjaJ+uij +H0yT4ZPFCynxb0j/Zk6J4PuZmxdDVRNo4K4C3vqF7eYgniMTaGbsD/gSubbCC2qg +bIBDYI9nxB7of7Qt8u8CkfO6F7KrFrCDNT2elbRonKl5Pddw6KVaDLK8sfauGpgb +i/XQaL5fVT31e5uGNQYgi4zvNb/JxvY4Yl2K7qwVWecXvgnDfxdgGCDoy0m0h4iY +ZFKunIIPZq11ljy+PgwON955Nwka9aJSkQDKo/t7HKlZqQxYpYyOFd2gsnQEx1FS +rfj5ZgbOYjkPumuBzI4Thfwf+e/0I5stfUukveXsoVmSGt0ta8euOKPogvPZHlsu +67k2DWMsxce9N8E11AgoSQVboYI6YU+VUudyrRdVhUEkp0HvuqxbSB0uz/E70uWu +xZr2gZGhk6yfMAp+yQcGN6dJv2u1GBslV6yYpr0BfHEuJnePHJSwmL9NT3TBrcyS +D/Zh9NtDnuQsbWZXKB/1r3XRqqTfTetDyz4WvRcvBmkoQ4X5RMVT8ywlGzKFieqC +RTUVCWNdG20rVLeuJcaE5txkVfTgSudIEl5aL7ERK45yK13RqP9W4G4YyQHUUVPu +y/sKOCrpGGJHPJBcG8Q0urqeRPq0DkLP9qD626+By9cmMpXW9v+Cc+ZWV33J6dgA +hbBRxq6ruom4UEbNRASkcfVDjqGHUnIhbHI2zgJhNWFvHYt1cJoYPWHXza5lxt/f +NFw7GOwNZolB1QIxv92PAY/PAE7bU+vAjOc93BkI1V6zvNdl93ehCxB3froP0pTY +o4HxVT+9scFcopUH7yXEOvMGXErS+a83ma+0Lp3uEC5be9TdgcmHlHEDfRigiMoX +y1D2lRhMGC9n1kiXsIA6zMnZD7rqlzK+jJSth5yAqFyJOLzD7KKXmCTL2wBpspCM +kKHHvq+VuiLJC1fjkGdyb0gzA4YjLkGIchI/gFdFF6l8/vzGGIXwV5UiranVu2y/ +2gWUjohF17zeBc3O6PaLZLPaPj3g5nFvKGXSj3E7Qj5gtjXdYq2hU9vfShFFCXzg +vtQS+wJZ/mdZAYrHh0+BQ5v5hggXvyQzTUuoxWyAp1iZx0GmGuqHf3gy7o8jwD0K 
+KfHSnIepMaBR9e+XYQBmvLfFQLrt2KaoGyc6Js7PXMloPl34AiEu1Gr2wftZU6tE +BKeBJ0hY/dAG8s2zuW5SiyMPvpttLLBmfCY/SvlI9tPCbLHMs2NUof9Kkh6n2pwd +PVyM+HLZiLkfxux7ET/J6QrjU6dVts2RXtHDOu1ZJq0NkCu+lj6hgxGTyoliUnub +RRW/+HIbpFYkOLc1LJvJD/TCylTIe2y4ftuiAIVR5vNR6ZnC9Xfwy9pNkatkeeuD +P0J9gJliBjr7lUT9yEzpzFCntIuU7Z8GRDxuP9dnka8phYMHeUkUpWUfUPtVk3JV +NhuaTllC2zO4cfdgo1pMi47tgC8cOwHzkrf6jA5NeRX9hyoHPpDNaKS1QqYYdXQc +r4MiHwVxg3aUAa0j8EQa3V/XPT4frKAQCJvXs3lzh1TrNcAz3r/IVujvNLo2bXmx +B3BTDT6WTqYXFxBoHJKUV+AkMzE3L63o+SrHlOdcqDXf1aDs/YKhZGCuWNS1GE9z +Fo6kY2LUsnTLuN/dz/fG+FtMuvLtwJKKDGX2LG6vF9Fi+uWEFFnj7+RViXF3EqL+ +qOIs1A5XWQeDmFtAk+079sTii0/AdZPR9myF7rNJQOc344Rx+y1OMr7jV08tFugR +D/N3SldpFOQ9Yrc8Ks+cSgYJwUGvDLSg3awNq1wv89hWY7p5UwRtntlz9Evmbjql +CmZE3sbYKupamNaS6rQmyf+Q8kEy17l9Ld34cSd1n2slrUin1KyJZfMZ2F5f4If4 +iXWxlfrI1pJ94F+Ud2n/nQOOiJ/qN08dX/N7qHqV7qJWs3k8eoE3/rdllfaeyllg +OvFuF2XsylR/FIPjN9MA81FfGWV8tWfX/RtHWcTrPutwrV/G/OERKLBvSsTtUxDF +P2bZewouZqUnjXUiRQoPRLPFHLjnmjwHAaVVSHZaHcePFz7pkSlyl7ANfXcl++KT +yK8SnlXkeZji9e7M/unspiI5DWG9HNnEItgmwL4v/XeurN1uPpX/bqjSrkPPASFF +OvlIYqR+Z8kjPuy8oWKzlapquqaqgGrNjvThX57fbLYvgLCpAkRPk231cVXMJBi0 +rE80nIMB14mt3qM+t6FwsMILYrqb78ZI9f9FwGI5eP4mHmPwL8TUfnXty2ZslPW2 +lswgh8GG13272+U4jXVYQOyhcxPuAscTsspUxZH+Lo8EdqpweinxjcGh+DBrQgFq +W+Cfj253dR+y7u0v3lP859gnQKVZk/UxMebGZ3egRniUAZDp0vjeVoqY9RJxqAQ0 +NY3CU/Yi2AOa5bIaaCBbdJGKzTlQdphwMec7q8hlpJwE8f5j2CUmhHrZ/78FsSg9 +uq1zDf9oeouorQun2P4FTtSAa+Hf3MHCTtRoqCMgtlzdBsS4ZUQzSlnqkzKUVJ/9 +lHUeP9MliDwBba2dkCEU01g+BThz3cRn8qEVo6NTEWvxOCG4VB9bWlIDC5UyX1Fz +MhnJe8wLIQwU4DwUex6UWc4yvWvy+VBZvbokmojiYQtFxpcnbhTuXaRrEuSjv9PM +ucmG2i1E7ACQ7+a5u8Ot/ssip+Ox6p8V16LLSKskDt7mNagTG/QYiCZhC3qFyIGV +SdjatCvVIeKx1rYtdslFGxD/UzivV7xPfQHWPU8n6wHWfAVjpUtxsXBJ8/xok5VT +vVVEz/t5IU1E5PImAF/OVfr1kk3/OvG+0cA1KdVplpXE5UocNZwk9+xsX4oHxoto +W8N9ePxRFdCxp+OyVNQVWEmXziJb5orisDDzbekxGLpIQdTNK/IeMsLzA1xxoSJ8 +VEKtk+Bwi03Mj1CBoR5L7zJQiUuZi7rcalYfG2OeOK6BlRDo1o69a5Z29Mry3Zcq +x5lsU9wMhZJ85m7yBmSGMgKO87kbsgzOB+vDWv56SLAWaIMxgtQksDQeIHrBBHmI 
+ndhvv46XSzfTIeNkSYXTAzjg49ibqz/ftuTWzXrpeoqehUlV0DGoLfXO04vdpkhW +dsZN4qkbKAISGYWETldWrc4SReKewWfs9CqYNoKhAxDI11efUhTLp5o4GD+LVO5F +Zo9B8wZ6AwxqZZkzY1q+GWKab4MM5K0NhgkvuO2XmNMkHf2Pd5BLG0tf73UAXXsj +iFFkYUapjW76sp8zO9+2um8pmV7HIDxNOfaLQVnJW3E+4Qtx4jVoHiAaR6Kr0JFh +5u1cX41mU888IKunnc8DkdSFu6DoiHwIM913nKfCtE+2tp/kzkN7GYoNBqEmhkP/ +RHqiN8KcbmlYZbo0gMjzSRRgnTdJhfTPctaZOylBGJKAObj7hs8c2LB5jxuHyRMb +wov/cqPMJ1Hqx+dGcCzWtdi+6M10596P6KHgfUIMWJKqxFtwEkG6TWtG3ivQpFX8 +w30mImFWKtZSx7XMs/HeR6OkzFIOjm6EAaZ7PGf6DjGrQNFKo1CuS/EIq1+x2v2E +gnUhPiHEdf4sHhOVuZxmniCvnfWOiWF0hj4XmmdqpWyFIi8kJEqU9o88oDnmSMCI +D9lqLQBrgX+NefvxHNxSul1ZMFLQ4weVCcYSyjLsjokoFwTNqlrWQi7rLPPmMS/Z +4O6PDlTEdm8DS7bVgGrTlNEKMWyAE4TEwPDaPaqBYZ/Rp3cCFJSWohpXLenNXJSK +XF5Xx5z6BkY1zGmSJMd1sUPGb6AwxEceywsxDj/5pd9fnwDvqV6d5+g2YaB6Iq0p +PcURws9MzWpewBYUtBkjn6+FQfubVM5Bkr/u/fw2UP1aJ5+x9GDWyMKY5pzxgS0M +FpnuplbrV/30B95qLVykwal/aC2v/aECUOmJsjwXZtAaqoAyFMR9eZ+W0a64k/Tw +v75b4G7jCa681kKeZp6qSQhc3l+PcIG37SFrfsrwLNF47CWEyNULbCa/8jabLTxm +Swxi2tcE0zAoyUv7tpAkN0GR1knA4W+k2Sg2iMppSQ2lV9lGFhTYbRohF95kGZWp +7oht+oD3Y2nwx4TWDA6JjRPHVKBQs6M4I8T//+fbfZgJiD1lx/ZRPrR7jtm7M80s +GDRQvtZ/aul9R0KHweNZHr9o55ywYELf/BNAuUYbH8qeacg0Lc0y69rep/Dk46Hi +EgVrc/jCSKLXm5Gv033BJL2NEZK5nsyctVYXBrk9rA3iO827jZDk5o1l/znOETYh +Y488IK1wfbSvfllyWLoHgtHsr4I+jVg2cgPWK6+ST87GU12almhMPQ2/7ifO52RE +h+ZyHO9+E9y297aMqeozrqUX/7Tr0u3mnJ7bWB3OVz/yezIjw30N0/8zkEM4aIHU +itVupvRqQUxKPBsld/QJ7j/X3oVDX2AWHRMMsKRpES8fWxttWa+DEBxSFCPluAQP +zt9BFDSboyztzdsIsB2Gtd6WRJH0ETOuqvY6/MMqPwWZy8FYOaiJFAZ/Vt+6T+jE +jFAz3dBQfMcYYW+CYt1943atXCm5V2xB6pSqzfCxmgjT/Sq0LLEooME7lY1Pwgqn +wj7VhDuAEw3KtgRheg/pqD6Sf+0pYNx90ormkU83dk6T9aqi9H/mSkbAP6y3OeHM +B99f/CorcIntsAQo+BT7CFx5Dt6kjcVBgtiEJj2J6JMiNFaL1xNQDw+29d7aH5cl +XuKkO3FjTuyAUk8yyllbExK41yxQ5/97vmKa1hO6jQFrsYmeLj18wX7sBkvN/IHs +FVYrverD4enFrQhRRkEstB53+a6UAUJAB27gUbAJ6o8HdCFVm6UuVAOYJOI+ZrsA +ai7akwRGuAz+e5NTYOY8LwOWwuXpBlMklsIgb7Qf/NzncoZALOONNjZ3WS+EwdvI +c99VIvuTp6CUg6BZ8JwU2Hff2jVbSyduvu+0ZC2FclymJsNaFX2pcudwFHJCd2Om 
+FUNxQ7oCefgu7jA+ji82hGhLWjEM+bur4GesVQdLXxvx1W7BHRJKbp94gaJpvcFk +PVSeN2fdbwavapm+8S+IbB5ciebPpiK890v5LBsDPxR4yjVcPwd6Ssw7lxf9jaly +8LnNAqRAlVbU5DZmItQDC2NoncBCxgkFIE52w7nq6oZk5THNunApuQrGp9TdB6DH +W7kWj4k2ZUQcoyr8L68grduQT9Pc3QHfefwhasfUfg1Rn+I3JR95qXh5zI+ZKmx4 +bvRrtJpbSUt4mtY1B9pl9smhg0Fj7nvBMAcZ12967FUKQQL2VBRcpdFSQNgFR5ml +F/imKA5ll9DeNDBf2nIMwWLjIQK/j5rlDlaG5cfvmdEjfJAIXf3WrIEsDR9Q4AX/ +9xJM7TKW27TqaRJeeZM8b/GAIWzdpFNbqliKR2JpHwqlb7dNkfV0qSVoXopQvpUL +H7bJMEDzn2ruokLzymy7fk/OldDE0Cq41o8VVXpExxTIfMxUlTVJtn50xS2yByg5 +NQfhwlpffq4xOljWwyLaHbq6VQYmN7OuxhjTboow+M0s3tQqA8sosiKjaAg/t6vZ +6HzlmSNoLgd4GOg6AABqblf2fjbD/4mpSHqsnbIIqtVktFtnzWiEU7AphIsztXSf +0HnB7l7xlJxEM6m5sQcP8FNDfErWMdoowBcJgF16QMS545ZqzPANokeWMSl9D57Q +ZNisoZsMZ3UG/rL6TCPMQnqHv3kH3Co1yl+DNrr8dGUC4Bhn6nVg6BFBuiNSeQUQ +uVvJ3PuYeC3NonjSMiRxwhkeST4XfEg9S/UZ+ApLNCLlM5ql5ZJObui2uKFzTAed +jATO9xv44JZF8S8Uva59mQvfC/+CjDNHcOOrrJy2d9OZPu8t4INZs83UTynUxh9F +2Ln3kYZ39yCTwSvB3+kV30R7/n0p1Cfdl7M1+fTW99jKXJ9m9pqjb9QhunOY7T12 +3yCdyTsdwX/PiTIUwM/pKZjw7xuQixOXR+OZpiHmvMnrzUSeMZsxj629E1mrRMhc +jDZb9ZnQOoLNggbaKTRuaE/DjpACpuMpi6jmnjVPAzMyGDOtQ7eaZx5lrpBGgv7z +pzd7SxXJn7ytfWu201TStZd0FyLjiAoYiSn4wNc1itbAhjirRLQxvHvaUbGmkAiY +kdOjeebjg4duEErxaDcRLcKRWk6MWeLH+xP3GjWSeQdalpfXRr1Vbs+ErAC5yWoO +D5W24qzZbzFeWngKEh/Aykchm42ZY5oxsbg41frjD1eKiAqCs+Is8x9CwHtu1uvN +jwxcA5VxkrxZlrVyorm2+mmnZ6LPwccUk8fSUnR0HtI+g/lEVrr72ho8qGVfUBau +4Muc7tg48iggb8Qs6wTuhG2ykBI8agGMV4aIjV1P1pBniXi3RamcMphUkSSplFPV +nBEx5RzaRyQZrtoBsPfGglpeOIwr61l6zgTtD6gWFwCvFar83mihzMzaSo41hDnb +K0EiIZxK3W9jnqV0QsCdKxb9PMrBM3ztAdtV6HJfsZzXxqD+zCA8Hz8rO8+NFgHa +42G6n4nEfXCUuzZOGgRMFSyCFZ3g5c8l+ayfCSk8PLF2Lx5K8cbu5qZrhepicJI0 +2+sEjhpf72oyNmQ3B1lUkRcRRgKWQGfRU5raris7oSiou5zmZUgScowNSyZHObGI +9cDxBrTWbm5kkS9UlXMBWs1Div//UJEmTnbwp3evZ6kAjunedxIjI420jIGD1VCu ++QQ9OZFak0glvOZ9L+9AbcnznmHrOd18qHazDPTwmpf7bAvDIWMwj3K/m+bXIvjY +Oifh6YR9AQiZuvkvFehZR0lFA/SciFejES7UOyfTqLPYDl0YC8tywGaGtNOrXc+4 +p9kYOeYxeKZO2DuOLr9rWgebSi8OCQ1MsigPeYintUOjwDHbWhLZ6tnnpA0/5+V4 
+S9ozLh0Zp3P8PyyYUCmRUGEr7Ly5Ec4DM3iU1JBp++ecGCNy9HY89LCmBcicsMV2 +uqP/umcyi3rCYR49SlvGlUNFeKwFxFGoU/zH7MXSFMBUfulLy2EPS/kX9CdgA7XM +Fu3QGYiuyw+0SbBRAAxKACUPiRy80TUidmxSzSWetRlqdW3T/uOMDMuxZN/UbEsD +nqVJJlH/7VBI50cME1SghRw8Dn2Y0wz+pSIFOR2ybzRZuQnsQ3hMZ/tqzuyBsMde +rtHFl6quRela25mnvKDsOuk0X2f5buDLP5Z8ypshXxOyy/MM8aXbvclq/VvpHLu+ +pmE2cztI6bWlMfEABeev9DZ7zjZ4ADPpm22LULe4ZGZlnkR2IjQthPSlnDZQ0Tiq +s6GQCPZbvxTOfQgtgVIQgHph/j+AuYOGGZDrGTyjAe3jeZyaAPL9mAPvKN0v+egr +eJ7dmFnjojFPYIY1JGQ++d3X4AGG/08HnxagRwEsaKdpF+w2ou70cW1TupdSOg7Q +6as3IlWRmPJLExz75MLKcu6jdX6i9b0LkuSZ81PVmXcnsCFaia2acbNytLyAMtOI +ggFvlhaaOueElE6G4LgfB57Qler3ipEO3D8Phl6bRBob3nZzmMSEJjICvgMWS6A2 +y0klebEHZQ7nAuSQRA73/YKty5HuwKK5Q+u7n+kud/nYuB15bCC7HsD9AnXUZWY5 +Wuv5r3H/TDmmqcACbFLjjgBQk6xhqcJ4n8SiN+wLhCSRudPhuwmNppHqC3akHwzH +b235z+FnKmTcbODzqJdiUDgSR12doVX6vMVhYm8rSWms99gou2dcAEtN/VhNXh7U +ZpYY3fjcp+32aSu/C+8TT6IBnUZuHvHiE+WbRf04xCzkOs9TxAz4v/8op9oirgln +cPnpxYY87jlMcH6P/fKzT3qZlzpCYfMAeTkFWkwtZ800nDhlqWVDqwaaPDiTskdo +MgbdeY9En07i3sQLCpLbTPUuJZAjJF4Xd6veORjRM+VAkTDHDZDNjW/sOYylh7W4 +8v//tJwAnpJkmeqmnfB/m72ezVup3LGLM1uCqk5F3JgHu8Nh7cK5Agl36fx9osva +fZARlXRGZHnLgIOsjTdxJhP6vTcQ5LQoObb8ZVmDwo24/6c+RuGDnYq14paDCqym +TXUoEXXVrNNgr7dlKnEULogAiEvL/KU8FN4e63TaxrE6vPBV2yjQTukEBS7tayBj +8rJzExkVX8DGJYQiE5KSkK3rLnJCtVC10w2JEpvS8MbMRH/pcRHDKWJ4K7g8Svk6 +S9157mRQTxwCUeJ1d5lJs8UZiyG491OJQpdDb9Nvk8+rl68CL9/+ecK5JDVCZEl6 +bz+GHeSNKpxlKVlJBnJ1EnGaoLhGxxeozCWWbSlfrg1MYXhfBGEaz4f0AcLLZUQK +6dDoJIEJgsr+z4IUfKp8/pBT85jHoVJjynpIpJO5ry6QmKevONKpCxIwZIF0hulW +B7DF5jLNXyxoK3rYMm3h6TyPFjMRpFqOWRiikanwqUkr/Wla/pVCg1klxijXYqlF +ppu+uk2u//3IjJTsL7z+3qHAzkz7YNvU9sG/jnW0cdVPKxgz3I8KwpDB5iTWBUs3 +S80gfR0KvyPC/OCORhkA4uEYuEQbLeS7EjHDkqGHvJ9Pbn0zbsMyQXThcVyO3vmn +ZvpvTyIXFoKmEsRHNisDv2+1CBLtUz9jguQp7p/Ivw7iUaTIJnXqFghSj6uX0QIo +9Kd90aKBSMpIsIPEq7QWaaqg7KptvE1iGV82OXFTk85yqiaaciLl48tTFBIhY0E6 +Hs3dDe8E+dEAkJsw+E77TdUSLxII3aMPVAfcHZ6EzCVTpYalGAcI+NQFF929WTNH +lp62ZXwm7dibcSs4IkGt8+H9nkDWI1Q0Rm7ssBwxY8oU2B+GAqCWVGFb7zvU6cO0 
+bzNypUiHBAdRBLOL97DRE957pmo59guaUA4NwZ528BpWbhLm9tVr13QUYhkirB8r +VYYHYsjZt2PY2+EdG4jcX3tJ15KVL4Q51a7Xpiad6dKZOntHY6AUupnx3OFGIOHE +Hojn0BwNRdyJMOnAXU4ItrDtOFtDBZb3N311uZPMHbz9xt9UMXCKC8cRF6ThITOz +osALdrnkHTNCABIxpd/Lks8pStif7/wU7pHKEO1K0AP+JNS1ZEHsxUEmUFcqr1or +mopzC/nYcEdzE3GXYfdboA+zT9lRDc/2nIM2A7EAU8Nb4x6Th4P/bhG/psb/WXmv +mH7P8X4dYwHqF1jOM5sYLybVUgzhBEzlsnrL3ueu2Qee/2FegJnAqzoHVaGUYx9w +K2OZ02lVF1MyECYVwO1FhG2L8UgUIfdBrecDMnIYTmRmmHAHC8jcrvhPAMnw8wJR +VuQ73QMpOLpSefj1jpJHwk7Co4kEN4I/VSXt+RwRARbb39jJOGOvo5XiJGg2+sAm +m/RSjkK0NRksoTyB+rnVtS8LbOIT+HAyE6mJBGWM88lJiWcVZj0zT3OXZeN3rcwl +HO0AVuaXLmoiv35QB38t5ACxcNgP39rjqiA/HwOMXasPDnhegITXW1a1AykmQKxW +Fdr9m0f6+iFTZMBL1hT9ucBIKSHv7uTOR8gMR4CC+VLhaBp3JEOjkdsxEuyJGf6L +R/AdoGjmyI7nlm/UI8zPYDW5eY4zaNMRR/R3ahB+s1hqZZhk13vuvDhgB+tqt3Be +ni2vzphiSe/5aXCP5tA+gQ+eQ+mS08tFWqETQXTZ9ppywyli8Bg/5EAmpllE5Yt/ +tF5fhBmsTJn+7OWASQajpNnhDrTeuQE3+055f0Fuceqivu7w2OEioOSFBCASVGjJ +lw5boZ/00YMDL9069qNTzvQF1Lhaw3JbLCdxRzRdGkAkL/cvF8kP5YRr1Ez8dELr +eGI3e+gX9wi/ZrhBwlPa4Gzg24Kk0H8Cx176L3/OsD/xKvFyI8rXkylgOAAMsOjU +GMsBUcclRhz8lAMQzlLnUCyBzHVcIvmW1tosz61+FTYl+PsLa6vmQAUnTu5nL2qs +yaneyN0jXqxAEV1dwPTpelAKG50J54OCEdtLeYRufPtYffTekawLDb/Mvd21OywZ +SlQT+4vlcVVa9rBYXEArOjSSPyAnHl4yj3k7bqxdDOX6TfG00m7rOnEVZui2PZIM +AQRCxLINtm3meZ4LrYr7VM0/zoMh2cpxP0CIiSSv+dxM7HOKiFEJTdH0B5VqnYne +nMRjOq/vpOtsrBU+aTZA83AqlKhAyhh7Y5fMg0ELjb3Tf44iuhj2WYdvSjJa2UzE +PqCvT+KnyAaAcKnCltXMYcFIZvpnlrkkc8Uy67kqkblH1rnUUjTuKZOBXE2Ks7zP +tmvsVqK6ONfUH1jTJYcuxHADOswewXi2r/ZbypPZtplw5tVxNTuJsjGKrkR2rsXF +hu6hCC1DwXvyrwkcyNgoNsu8+NirtxlYJNuTHcDdw0ZPGw4fJO6uDFWv0csX8E5q +/sTodrI0dQLijidtYku8eCeqnUmYBXjVexheqrTnuelIV9TQJQzG1iXeZUphNWgN +ss+NQI5ht4KeWgqDM8bQJG48921O/rR7zy4dcsxn4PuJQ/fM1CWYBpfXGEnB7msW +bSFlsr8IgFukjKkOTqSiMmHBaur3/y8pyWtrJNGJdjqUjgiFr3nOc0uT8XWNThRf +r7eTTANk+8OvS8K82LcpmXMzaUkJ7rtgqfg6+cyMDtqQalEh7ggFqhRI6Kv0n+4h +eI8gX+McyUJo47f3ew9qg0ccO/zadKn6WdTNyzkPDXxy8a1UA/8xZArCTVjhAoRI +gkQem2x3clbdBnUsByBMl7rzIgOhdCqx+dC2ghI8ummmJLI99Kdhq3vy8qlbLA4o 
+zauw/b96C/GPrGVkOxHDGkhc5MPEadWiJZNuouqt3Qx42lgy97RWRBsjBrgk60F8 +XxlUXQ8bmI/aBe4bBXLDqZEiALgowYWCvS1JkMpmjSl+vcCzXL7XypGqek1ZFrQJ +D3hw+JnDseg/8xsle6RUR9MjrGZZXsgTQ8ULAmI6PC8K7WCJGvpHSIw5FXqGDuJM +v3/yRRWHDDLIwKC+3doaIAVD07ww53URJ7lweZk4fjTLIkRz6U7Q9fEl6hZVH6wU +fUqRQecAXcdc+2ZUKmmo1+yp1RHLuo3GM25bEg2TcNQkuwcfCgSlGprXQEOOScH/ +xj8fzxi+l60xgPNdg+TcbAECfpXbroVNkv3rw+axN5TDmaYa3pNDbiGykDpp+u2S +4ussqfHGXTt3SeXnDlRdqyZp9QNbn7cCfAivqZyR0CzFMOpIZf+6u9aIGpAdQVcS +eop6iEnbUeEP5/+EmcEulIlvqh+xKk5ze2YUqE6W4psxozpcVdv/B4hlOfmVmMBf +IcWUE4qyea0Gcpg2BcgovgmYMomUN+QTCRUYCB0l69k8gwjrUJ1H+yvxGhoZdwkO +7PCYujnZF4MZOqN6FSsT2MrUwlX9W15KljHspARPuL7gcAz9qmEAJg== diff --git a/scripts/mount-shr.sh b/scripts/mount-shr.sh new file mode 100755 index 000000000..7b2048aaa --- /dev/null +++ b/scripts/mount-shr.sh @@ -0,0 +1,92 @@ +# ============================================================================== +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# Mount data sets to USS mounts +# ============================================================================== + +set -A mount_list "/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ +"/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ +"/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ +"/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ +"/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ +"/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ +"/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ +"/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ +"/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ +"/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ +"/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ +"/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ +"/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ +"/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ +"/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ +"/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ +"/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ +"/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ +"/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ +"/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" \ +"/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ +"/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ +"/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ +"/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ +"/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ +"/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" + +mount(){ + unset path + unset data_set + for tgt in "${mount_list[@]}" ; do + # TODO: Can use something like the below to find ouf a mount is in place and act on that + # df /zoau/v1.0.0-ga | tail -n +2 |cut -d " " -f 2 | sed 's/(//' | sed 's/.$//' + path=`echo "${tgt}" | cut -d ":" -f 1` + data_set=`echo "${tgt}" | cut -d ":" -f 2` + mkdir -p ${path} + echo "Mouting data set ${data_set} to ${path}." 
+ /usr/sbin/mount -r -t zfs -f ${data_set} ${path} + done +} + +unmount(){ + unset path + unset data_set + for tgt in "${mount_list[@]}" ; do + path=`echo "${tgt}" | cut -d ":" -f 1` + data_set=`echo "${tgt}" | cut -d ":" -f 2` + echo "Unmounting data set ${data_set} from ${path}." + /usr/sbin/unmount ${path} + done +} + +usage () { + echo "" + echo "Usage: $0 --mount, --unmount" + echo " $0 --mount" + echo "Choices:" + echo " - mount: will create paths and mount data sets." + echo " - unmount: will unmount data sets from paths." +} + +################################################################################ +# Main arg parse +################################################################################ +case "$1" in +--mount) + mount + ;; +--unmount) + unmount + ;; +*) + usage + ;; +esac diff --git a/scripts/mount-shr.sh.encrypt b/scripts/mount-shr.sh.encrypt deleted file mode 100644 index c5ed09837..000000000 --- a/scripts/mount-shr.sh.encrypt +++ /dev/null @@ -1,71 +0,0 @@ -U2FsdGVkX1/97joDHr2PfbtM8603L6QRNH7YOTwyx6+WEU5Emp9YwRLWVFdZPxrn -GfDQXpdmqZ0shGjAPjfhA7ZMVz3qmWZHMr2eCMZhFkoBT4PjBYf/GVfW751LYT29 -7EbpJZYIkXE45VWEw+79zetmPr8xxQ0A7Or1prUAqzUcjSkPvO4iUSvPpn7GcubI -3Pcmls6W+Jd3KrT3POaIPFQsFKmp1lBMQGVEk3/4GGP3+aDhcDqjuV5BmqFs55Ue -YhE7Rcc4nGeCPbKTYe0ouU5VR3WVNJX8+UIvzL0mH+01Ctjn3PcRODzklTtqOeHu -VBGHwAH+NRo36iMw9wYZJZkT/ay8UlVySvKrAQ+13lxpKA64Eo/p20rv3ht84QI6 -lc8JgNzJ3RKo9YCK+iz3UPfXqPXen1q3vto36mCdvP0pv/Epz6PtQaGxXO4gEFlL -jmP6IfGurMa7xiluPxjXFhhb/9I0VDTFk2dEsalu3qSsvabRjJtL0bSW0jWFMTAp -+Q7JH6WjzSJQ29qCR/NmiPjFbl3AYIcQjtMJdziVOksZl43IRM1v6XOVfvo4jwOt -C1iWfqIIcLzsW7ccFqDAoGFwDWBdqenL0KFhi0I5Goi/HJb1pJyYylD5PChOv8rL -OnXH4jDvKEeNnYv06rmtR8rEzngDLt6x0wKQx+EXmEfXKiYbl7Dfp2tFLbVdSNcz -j6uXgIFaRqOHzYD8S0J6FpRz4dTMK5hvKxDfhzg3x91alQ0zQr8+kzU6kOWq5FF2 -2WfEhFYKZTGnL+E3wgkeN4an6gd1mp2AGJm/aTDBIZaHe75uUpfg1Uqm1e/GhNNs -wsJlNZDEB5FjAi+11mrGUUuRTYSVLBVmkYvMbT2nG2QiUuC1tAsH/Cv7X9aEXAsT -oYNn4pAlXmHllws12e1RWaOuxkaX8R0rzfG+dA1DVXzzDNZkNBS06Oddrlp8X7Op 
-Beez9+PpvMzL3X2vuMiTJYLQi2kk/wATh3DHMP5W+9vHxGYcx7O9G1foFiQZsy0y -sDaH53ge6KNFnS9wBACE0R9vEps7oruTCvxAmvFXv8bXtJx+JmDoFcpg8aN8dqvM -B0hZJjtzpdPz0y/f5cXlkhgGXRORwZir/okdg8ffs5WqLFHZO/MWYdWH5/ws+DFs -eMVlxafcmTvgtMZjRNO0Z1s0TMCwDCBy+mv3YbyFLzBcRZFBuStHfdc3QvOQNJer -J09UpweP5N7aQnivgbAstAMBIHR6WfRBmASkDaN/YBKF4PiPWgOokEywiVXAF4Iz -ph2Z23q6b+fTC3tBYRdPqm55cx/IkLMLJmmOcGPCBY4fv7V9QpPuYZvPu75esCC3 -vzPNxKKmn4r6dwwwP/R9xiwAIPS8ZE0rtD1xzi7KECsp2AMGFtg8zu+Ve4xjJUxZ -HHCcydRxlb76rz6UNHMalw1ywtx7afJdzoAwQzef0tmnfAI1w5QIi5bB9fo7VeZg -SVWVemnTamFm0SZcgZmow7k+FiFsZAxwe1JE0FxKblgpdBqWEUYkfosYsmll6OsD -HOj64MsW4+X25ygJdBkBjFsY3xa56cnzAyIkuZ7j0ScuNimQzNpdt3PT/khY6CY4 -vSkIja4TapNAoTwlTwLYBVLzS0pT78VS8JsX0cRFNCsISLatGD42APeCcGDzXB+5 -fo0+nJsjI+hLTMG1gpkGpZugChtoHMmRBMcHsblPh1QPsyD3RqStpzNQQcM42Xw1 -+ZrVxoeePj2boMmu5R2l3Rx4O7bBDPbD4SvPSbxX6s7dXeu7Hlox18LMA6lhMaU2 -8DmtcdUWRVTNsi7KifAh168qH13M9xBRUXW5n3K65W+v1Fp87mqp7sbXFRXRK7Gw -OBTScWf99JRVzJiu77DsxJqilLo72KCSbVuZkYQtDMnagHukPWplomgmGkaQf1QH -bAMAlcFMm8Oy6EDQjsAjtxj4hoFMYilN6svuau4UwImfy2Dw7glRppYJOtsa/lgY -89K5JntejJbBxObvOhatH2Lybi6xOZIBDueYzrefXJxb9l2Ul7U3AUhFlcjCv0uc -fTwJrRzVpzOqkzjWOZpS3KSSu/+5LCN4+75Y7/2A2OicpuD8dAVGeSPBxyiMziVN -aBt00GKdLe6WnyofDTnEmEQ+LLdS9fyqPjBquHTlZyF9BRwU1xM4hd5IrNlmusq5 -57MqDaL9RRMgB8+bx9TTSUQQRAeSdumk/ekUykMFG/aItjFNR8PEth340Cz0x/ZZ -n7xgo6mam6yTnYVcA0lBfhRd6X5OzZEkj+y9zQmhKUBe8NKI+bdxEuy1McdCeRb+ -XRI2SBiW/k4PiTZLDwrP2UmeR58fO4OHguyO0oPfnTmbYunqKNg27TxuN6R/MqBn -YDV/gS1RYO+lWSfll2cQO6cWSBCgJVw3Z+XAlGGRxiK5pMHictO8Tq2O0w8AFWzm -85CT7Xut9fJafdom6h6lK299UKmVJXzqesw5keYUv4yI4wAgdjJsQ1450jMT4CkK -WknpIHbwcloCM20OL9hVhTWtUbUKQGWwnauRabLDPSoh582JjCAAxi4keSeWyah+ -z4TGFi/ft+ZAFxhmspSjILUye7GtWJgXg/GFK0G2YoB01bXcINduXVbLfT6FvWOn -NzXgzEQF9/BfEkBQYKeAbUyOP5izaV7fJ+UPlub1Cl5/v96zstDfKie+OHHKELWA -3WZbxcRQdJKere+ELXCFRxCEqnPXY34ZhwAiPYswKRBPIkBbTrtdRYaiEO4XuDtu -g6hPhgQRvG66usxHnpGxPONtDLN54uQbkUMnLYV1mNQXCtaFekBH4J/qZGI9rpMt -7Q8bE8PLCX6aHs4+v2IqMaEj8LhxvyXu0Pewo9TrGAnL6TBg2rA6mB6UyccR3fh4 
-pYaLq+P8oUCKm/ZzI9k6zFHZIsLsBtIFx+KTZMYbpX4TUGa+0Xd9DgEJ7wH83dTS -hdoJzL3dEXwfnGeqVVhEYLUi9NXhEEEMBEyPoH3XLRLXXHZEPdhDSg3kB4md/Q/a -Xf151A6UsETsRdZaJBNK4RsjdcIkheCsE3rAuRiErm8rAASThxoAJ0VWfq96lMar -D/OkvVXgEYVDRc04DyH4W2DQTqvi+37hjxxOoxUVK4sQYR4wkJHYhgvyYCzi6ju9 -1+ZBexKErbnfuOgDpBeUsyzLKrSuCFMz36RpslgBGe7GeKRfUjL9ehaBIhsDntbJ -z7aIzsnUls/yCA2O7Cc8tTvteZy98HW9krgqmD/QR/EKNq09kw5EiKlq/dUpSgsm -Ql4x0N0nFirqnocRKwHWJ6eQokD120/OjB6m0unYp4l4fRll+fjOeZOGkS1NM9nN -OSXe9vM9IGR7ZPrdeIUJjFKQVYzDJNe5FhbHqfaMCzbo30haEfXqqKY7EhiUQ80V -5Jbqu47byjFmJ7mtDoQKvpMXCH8edxaZqZU8sR1zROjLUo56UJa3g/lAmXrle9mD -b5lphZBOuYuzFIWov6DEUoTmUuyvvfMJLMtPBdo4bHsZl/XfQzFyDmpZqyM1OHGU -TgUGfmBDbCXYsXWCE6i/l3eTrn21LXRHwavaeuex0NNrT6S6fQdSbh0kTGiXpgG8 -oOPXO37BZkogXLGgHnPlpBaTtIRbKiq1fkgdNNec3aNhsI4oUCQycrL0tjiynwEd -GRCHsNdMpzLBN10xc9zOmn+Jfth+qw12COSCL+3OtduKOkFOTr9GtPU3KtPww8Xw -/8XZ7vQsyArdluRD9SHDcv2M4EIZPI/LqRSxvGFLzsu55W0/byW72dygv7dRSvd5 -geZXPO0WYDbYdJX3Ix5oCIdjWXDoU3MY6/GFEFQULdRgvGmPN2gl33IkBCcHOJxQ -JVglnmqb4W8v+veUbXgYOVCrWcRSMC7xhzMkxmt92sxg56K+C0JnLBx3848CzFLx -O3WlizRLYwV5sYx/fHvPiNgzq9BpCKozkbuh01wKo7n7Hs8TOBftzxtT97lQqSWJ -NdzZTGw4M3Vx5Qh4n7rLO0rG7XCJ6QcNf3rzY4Jsb40cMC530+4sZc8Cnoa34Q3Z -3wRAcZy5pjqOp67siYT5n1/c0TraiT065pdjwAO1GRQ7lBq1R3pc8rRx56tuHI6S -vUFptTIBTiYTfGUTNNV+plbuHc/Ke3YNWWI6IGRl38CmacOi57YbWKQfodjABgQn -B/y7EIGQmmcLyotiAR+aPenJxcODWGk8pzyiPhOkdnkZahQibxRha9ozv7kbx9H2 -eJPzW42Klv2y1EHjl6goF6ZOb4J83WwejTeBH51PudryI5ALm4t52dhDLE0gjhvG -s9OtZyIu7LaJ33BJHrZGqZOhhugbVqep99kbK9lV9Yapq+f26nzuQB9PENtizZlB -jF1zJedzfWmCtzzD+WyNR0UFaivew9dqopZUtxTEdEmrxyBQjBq/58qS7TF1U0th -Eh6KGOdwDf48IEgfteQKqRodkUjxdk7P2iymYB3HuBnB64DrM2j0GrPw3fW4/eBn -5vA/1aUyAQg5x1eFb9XMbf9umaAyulCB diff --git a/scripts/profile-shr b/scripts/profile-shr new file mode 100755 index 000000000..22da6d2f7 --- /dev/null +++ b/scripts/profile-shr @@ -0,0 +1,230 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +################################################################################ +# Global vars +################################################################################ +zoau_version="" +python_version="" +python_path="" +bash_enabled=false + +################################################################################ +# zoau case match +################################################################################ +zoau_choice () { + case "$1" in + [a]* ) zoau_version="/zoau/v1.2.0";; + [b]* ) zoau_version="/zoau/v1.0.0-ga";; + [c]* ) zoau_version="/zoau/v1.0.1-ga";; + [d]* ) zoau_version="/zoau/v1.0.1-ptf1";; + [e]* ) zoau_version="/zoau/v1.0.1-ptf2";; + [f]* ) zoau_version="/zoau/v1.0.2-ga";; + [g]* ) zoau_version="/zoau/v1.0.3-ga5";; + [h]* ) zoau_version="/zoau/v1.0.3-ptf2";; + [i]* ) zoau_version="/zoau/v1.1.0-spr";; + [j]* ) zoau_version="/zoau/v1.1.0-spr5";; + [k]* ) zoau_version="/zoau/v1.1.0-spr7";; + [l]* ) zoau_version="/zoau/v1.1.0-ga";; + [m]* ) zoau_version="/zoau/v1.1.1-ptf1";; + [n]* ) zoau_version="/zoau/v1.2.0f";; + [o]* ) zoau_version="/zoau/v1.2.1";; + [p]* ) zoau_version="/zoau/v1.2.1-rc1";; + [q]* ) zoau_version="/zoau/v1.2.1g";; + [r]* ) zoau_version="/zoau/v1.2.1h";; + [s]* ) zoau_version="/zoau/v1.2.2";; + [t]* ) zoau_version="/zoau/latest";; + * ) echo "" + usage + ;; + esac +} + 
+################################################################################ +# zoau case match +################################################################################ +python_choice () { + case $1 in + [1]* ) python_version="3.8"; + python_path="/python3/usr/lpp/IBM/cyp/v3r8/pyz";; + [2]* ) python_version="3.9"; + python_path="/python2/usr/lpp/IBM/cyp/v3r9/pyz";; + [3]* ) python_version="3.10"; + python_path="/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz";; + [4]* ) python_version="3.11"; + python_path="/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz";; + *) echo "";usage;; + esac +} + +################################################################################ +# zoau case match +################################################################################ +bash_choice() { + case "$1" in + [b]* ) bash_enabled=true;; + * ) echo "";usage;; + esac +} +################################################################################ +# User input for Python +################################################################################ +usage () { + echo "" + echo "Usage: $0 [abcdefghijklmn] [123] b" + echo "ZOAU Choices:" + echo "\ta) ZOAU /zoau/v1.2.0" + echo "\tb) ZOAU /zoau/v1.0.0-ga" + echo "\tc) ZOAU /zoau/v1.0.1-ga" + echo "\td) ZOAU /zoau/v1.0.1-ptf1" + echo "\te) ZOAU /zoau/v1.0.1-ptf2" + echo "\tf) ZOAU /zoau/v1.0.2-ga" + echo "\tg) ZOAU /zoau/v1.0.3-ga5" + echo "\th) ZOAU /zoau/v1.0.3-ptf2" + echo "\ti) ZOAU /zoau/v1.1.0-spr" + echo "\tj) ZOAU /zoau/v1.1.0-spr5" + echo "\tk) ZOAU /zoau/v1.1.0-spr7" + echo "\tl) ZOAU /zoau/v1.1.0-ga" + echo "\tm) ZOAU /zoau/v1.1.1-ptf1" + echo "\tn) ZOAU /zoau/v1.2.0f" + echo "\to) ZOAU /zoau/v1.2.1" + echo "\tp) ZOAU /zoau/v1.2.1-rc1" + echo "\tq) ZOAU /zoau/v1.2.1g" + echo "\tr) ZOAU /zoau/v1.2.1h" + echo "\ts) ZOAU /zoau/v1.2.2" + echo "\tt) ZOAU /zoau/latest" + echo "" + echo "Python Choices:" + echo "\t1) Python 3.8" + echo "\t2) Python 3.9" + echo "\t3) Python 3.10" + echo "\t4) Python 3.11" + echo "" 
+ echo "Bash shell" + echo "\tb) 'b' to enable bash shell" +} + +################################################################################ +# Message to user +################################################################################ +print_choices () { + echo "Using ZOAU version="$zoau_version + echo "Using python version="$python_version + echo "Bash = ${bash_enabled}" +} + +################################################################################ +# Configure all exports +################################################################################ +set_exports (){ + + export PATH=/bin:. + + ################################################################################ + # Set the ported tools directory on the EC, see the tools you can use, eg: + # vim, bash, etc + ################################################################################ + export TOOLS_DIR=/usr/lpp/rsusr/ported + export PATH=$PATH:$TOOLS_DIR/bin + + ################################################################################ + # Set the editor to VI + ################################################################################ + export TERM=xterm + + ################################################################################ + # Standard exports used in EBCDIC/ASCII conversion needed by tools like pyz/zoau + ################################################################################ + export _BPXK_AUTOCVT='ON' + export _CEE_RUNOPTS='FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)' + export _TAG_REDIR_ERR=txt + export _TAG_REDIR_IN=txt + export _TAG_REDIR_OUT=txt + export LANG=C + + ################################################################################ + # Set Java + ################################################################################ + export JAVA_HOME=/usr/lpp/java170/J7.0 + + ################################################################################ + # Configure Python + 
################################################################################ + export PYTHON_HOME=$python_path + export PYTHON=$PYTHON_HOME/bin + export LIBPATH=$PYTHON_HOME/lib:$LIBPATH + + ################################################################################ + # ZOAU 1.0.2 or or earlier ueses ZOAU_ROOT and not ZOAU_HOME + ################################################################################ + export ZOAU_HOME=/zoau/${zoau_version} + export PATH=$ZOAU_HOME/bin:$PATH:$PYTHON:$JAVA_HOME/bin:$TOOLS_DIR/bin + export MANPATH=$MANPATH:$TOOLS_DIR/man + export ZOAU_ROOT=${ZOAU_HOME} + export PYTHONPATH=${ZOAU_HOME}/lib/:${PYTHONPATH} + export LIBPATH=${ZOAU_HOME}/lib:${LIBPATH} + + ################################################################################ + # Custom terminal configurations + ################################################################################ + # Append home directory to the current path + export PATH=$PATH:$HOME: + + # Set the prompt to display your login name & current directory + export PS1='[ $LOGNAME':'$PWD':' ]' + + ################################################################################ + # Run bash shell: + # I have have seen many issues using this version of bash to edit files on the + # EC, for example of you edit your .profile with VI under BASH, it will render + # unreable, for times I have to edit, I type exit it defaults be back into + # the zos_ssh shell which does not have any issues with VI or editing files. + # I generally use bash only for history and running commands. 
+ ################################################################################ + if [ "{$bash_enabled}" = true ]; then + bash; + fi + + alias python="python3" + alias pip="pip3" +} +################################################################################ +# Main +################################################################################ +# User enters choices for zoau, python and bash +if [ $# -eq 3 ];then + zoau_choice $1 + python_choice $2 + bash_choice $3 + print_choices + set_exports + +# User enters choices for zoau and python, bash defaults to false +elif [ $# -eq 2 ];then + bash_enabled=false + zoau_choice $1 + python_choice $2 + print_choices + set_exports + +# User enters choice for zoau, python defaults to 3.8 and bash to false +elif [ $# -eq 1 ]; then + zoau_choice $1 + python_choice 1 + bash_enabled=false + print_choices + set_exports +else + usage +fi diff --git a/scripts/profile-shr.encrypt b/scripts/profile-shr.encrypt deleted file mode 100644 index 195e4f62c..000000000 --- a/scripts/profile-shr.encrypt +++ /dev/null @@ -1,197 +0,0 @@ -U2FsdGVkX1+Ro2OkwERnqQxRgqE22raIG0nuZNU+s4yMx5bMTOFAwynkvI/D4vy2 -rb+wSWs/HErbUngILXkAWf8Slq8hjumbVIBoST1PyBWIaw61cIBHVRZymyfwv62o -NPNkcegh24mT16s8zGjHItZ65hxbgfWo7mKZ0hO8sC/fCzeTPQEvc0fuz9KacTez -sqtaz/ZgTpUG+Oq+a+rhhabVfBwpZQ/U0Rm/U4MnY3Ixhzeo6xWIETMWO8oSxc9N -GUVcNk5soo6rwOvuL/s2tHm8rIyHsxSxDtoCfLy5euIwEOriPgvjJ9fLV1KVqmQR -WHAxVXuBKOu+vA2mojPuPYkf9ltYW3yLDkZJ41YSl9KznjlEEYhFgQQK3Rkiqq9h -C1MjNwP2DalMLE628BXT8AUc6HODuHUuSq/fB9UiWqrsHal16quyOTekb+Q96J8j -V1JzfNGS90qhQTWUlAQ36NdD+60bchtf3+A8enaMqCequyaY3PLDDi+OYFgVRwcM -pXfcLfDeqmb3/Q1Jd8RBoz7kZZ19os0ad07HOJEPQHkFSNUfSEGwQ1vbk7R77zj5 -u/gyK3/RIYLSIMpgbdweAx78tYmnABXJ/8yBace/MpyHmUo2jxL+hweLH/aT6xzD -MyISNtK/Xw2aButSh3RsVegNDz0gkid/c4dVb2lhZ+i/rDaY2kZ+8vYr4A1nEgXP -n8A4IhIx0u3ssEOnLoTvBMBurxi0aiqyknKIm8GDveA9l9d7J8x41s/SXWM9mI9j -2/rcXbTQRn8nCmFJazdhsnY23b1O1cdHhw2MMkzQ76yE1iTMHLHS+nDJiR9tf1Sd 
-da1MXgdOPV+rtCvMJqFPizBR+lRWHEj0w3zJW0dPIbZXySUgZRuFa1L6sQ6DmHDf -Noo46fdezm+usBoJHwvc/K23yhYDrYAPPlsk5eK9mQEvlgFLnPZtduKMwys9NXUt -a10rlLQvMC2NeP93dZCabYmCYnFdIAl/kTqpjs+uC8kFDsdY5W5sS3Z863DBX/pa -mB4aR0B97mfWaGHRqxXYwqehvYmy6Dhq11aO4syKOERqrzQ+xw9NrOJ66SmxZdMJ -ckPx6yoIRew/3fdkGIdgtr7Jd0vDkoKSouXAOR1sdq42ZrLse/Ee7tSiSuy6kwvX -BBzPxKoNyKaNXTg28W0N61aJGn3BrEG6Eutx37VgC7wSk4awjWhxS0ld8DjIbxFk -hQK3+BJqW5BWQQh4FkHfvsWgKSDqzLWbXeMc0WacQJ5tSaMJs+Wc6GTbhs4zcEdm -XyBYsl4eagtr7KMNtLJGzFd06QQzb7T/wbE4Z7V4A35LyGcz/KAr8jPaFT+Z5bp3 -FgukvBkjCaN+BHOakmXPph7zeF93akIySrNm/igFauuhQ2DgqMTtMOCg2/XewoKO -ZbOWcxDUPy2yqlSg0srKYHX+8IKLX96zmmryQA1PpLYJmVZQ/t2muyCoMV137DrD -F9gy8Vul+pxkmXQ9ezWeeaBLjGU+7JFCOMJBkFGbUt4Qsjm539FrGZ3Tne3YZ2cu -PlFJlpYjZAsBnhIKXY7AX9vkT5L2rjuh4VZrLNbcuNDxM2OnH414r5uWoyieFFG8 -W5xhHJ6i6icYjab9PeWkl08hLLEL160ib1oPJFdNiFVL9H1OBhpO5krYLMG3/N/Y -B0u+u3ujhxP5xZkCI0mvMZWO4zDlYUp8w7ci+iD9NzOK4DUJgXmxTswtLEa49mOw -hVD9ZuZByfwZChPCJ+7i27CVrZWir0O9ffjOD4AZ/bpNQ6/TAyS6W+W5fmiT+Rfa -49YbVmuPFS+uEzeDJ/rfpbO32bJ9fGgLICyp90dCss8lBoS6BHGurkf7XimmSkT1 -3onAZ+rs6T+pYrcAqBhscjNQpxjX2Tyc+6M/lxqsNFfG3Dd2SE1W9Box/lo3NS9/ -EOvNOevjjpBlK95tv0EiShPrBROcBvNxbxlteCtfVsMXldM8Oz12MtOZiT3nQPSV -TVbH7v/QNN4E/2Szb4sAO86Rwv6fGXTKsmOM6k1yiqi0y6j9WivwuKn1IiTpAajW -Cf8uQrXFJq0nzklq0b+W9xGHlCQJIHGCUFRHFqLIf1WHfi/jqYsjyUsAGMtcmNoN -1Ggzh2tX8vEWH4Qy0g+bEixlfn0eP22UWOyxmeWpOkDtQuBFBau0Dtns6I917X4Z -INRls60WtPoxWwUJqWfw0PnvkZef6e7qzzT66QSmuyMtIseTq3DYHVd46Rgq9c0d -ZjO6+irreIfmzoZuzHJnNuzj1mZeladcbzimGjrB9mJggspNBrM50q7YcuMv+y1o -mQuAIPb6L5u401Ti3BLMC/+fsgj1gT90r+cWwckXmtZZVaYB6xP8K3qlAzcf1lP6 -TXmHRC3wywS3aDFLHU446KU0NwjQ98Jnc/f1ZNYNTtnlx0Cn8JMihXVFTDn71fJl -papY+tk1LGp+ViZzkLoQ7jrjrOmkQ/uKnTxqITpBRlZrv0W1upz5nqdET4bd4D8n -+5r5ctsMCY/0Aa8VksgRxsE1fp14XItlH94aI0UFv+eZ1gJwsal5qyTP6phtP2FB -i2mZP9tSjggkB15YRzlQ5q9NHG8uBktTVVC9E4blDTVaYXfxe7RiQ4eFNIKGbkET -4a07zb+wMIOXSxhlj0+qK6LJubZsDYQYSSjaaNL4hN4dn5Zfh20WtuF1lD6+Ujcb -Rqqa3CEVe763CSolSijrmHOd/GbfKQneeo1nYD8TvwSiGiI7iIcF1Y6LcMBS1fIx 
-FIfiwCjbwlS5C6lfW4ZsqMutbH33WjLlgsibrPG5t9kIRm+AeGrdpu79Dr4VYBjc -FPZ11LbN/aKaokjCXomK9grUpIgT4Xxra4yLVprUjgzfhDBdWVO3011SO77OgCtQ -xJYC3V+nmh26VoAPHUEw1Ep+Nr3PA59TdI87tNqDq1fG17Nc9cOOeUbgu/84oP2J -KEGfZnNsryTF5rhIagFqUICWvJLn+IuOF8HQbtYnvQxoJLy30YoFij3QX6K3f/42 -BokjnHXI5RELnVWL/Bqfh90x+cqIaRKUWzrK0P9dirTDdpiiI3wIXcUhysjvoBTw -Gd2OQTAWyQH9K5bkiaeloJaemPWcnbB5ELJTNnAtu1uNIP/khHI+rGO9NNfNTMJZ -MC3R7B/VCa5MhTt24l3dHeiEKbBjG5Va8D9zEVj17DQJWS8/TuqeV5CTCoBM+Z0n -pae0ldCJEG8yuoLEmy3qcTZC97RwSqHoJ04HxFIzAKon79o6TaNKCeuSlWP14X6/ -uuNvFvEj5MCkJKH+P0lZuAtcheZQqA4V85tWg/a2K912CtAdmaCA+kz8DzV5kMNT -CNZT2mnGP8C4Zzi2ntX/+NuitZIeVZYdxyLUj0dIv/5HL8lRxjRonI37Bvs3zu9o -edN85602I9fkI2mYCgI2IeFRb5zulrDIurrlrupl4r81h1M3uaFfFWtZOqtWr5qp -Sr61dCr7Mm4+/o/ex20tl7YFT3UcdvLZYV2oX3UVIqnhCQfEDDFmQ1kX+0sime1w -9yXWVlMhERrlA0VxABj540SVtA/gpGIP6/Vk6qcyW1k/QlKldtqHpMDpnX6JF11j -EbXFf49NExU7COmWb3TJZqTa+P8mYaMpVI6pe331gyIcuFuVKB46o+LtbVYhwfpA -g8O1xb1qsFZI8D+6sccq+4C62EkFDxWyyXf8BpmgWR1+asiJOquNlZS+//IXyCx8 -xLcePsMS6CfXY9uZcc+JYDdKDDNDYokaXt8rJeo4AFsgyzp2yZ8KiVbian7k7uc2 -5JpxMSB9wop1ZR33TT4T19cuHeedvnSFvJugu13TT52qoE3Ho2IOQ9kGUpjSBXXP -K38dQvKT+NlTmhCspbrzdfvAQEQDijPhITXR5GvqmxkwZgDGgygvjDxRMiwdRQXw -ZL3ifs6XE71pmZmK6MpmT1Bec75mVoiX2bNqBZccWJC98jMUCMwSdA0RAr6PCk0E -VB2bRxes0dnuztnV4zuJFGOG/oH6r7QMwKp61KGwIeZhwvjeLIvYWWzO/oM4a5Nu -cm1fKzp4c2J+FU+ogJWSF5Ek76oYdo0E6RSB42LBAPxa/HjIBDPAR9nqJ9j5mhvT -4ZYb/1PuoBc4uwCG3DqbxZTjwT6TvIdLBkHMGkYw27TtCO7KksepUKzez5jwOXZv -oQCXjop3J2LbQ4NkASStidjv+zCJIO9Cl/G1izcgAWSyijb1lEtGnD1lguR7rraP -co27WJZ7aI/OtSCGM0ezSfOodT6Am6Bg6AnzsZ7OCHadbsmp7GubZLCyE0dQf3BF -c8cHqk44h18oCT4ieiuYBQ5H1MQWDwgCAj4Ji5DoiPhg8FTr36PIeaIARl5L/cWa -lRbv5IW1+LRvnLamOBJ0EncQvdt9ohIigZHpVrSV+f10WulM9pFvRNOKADl0qKbB -ylln62Qg/gNSt+VQOBH5AwBDm/PTyKkSzn9N0Rs4mnSiRyjjOPi3sle/d6zJNnUf -RILRFHcfMGeKS7m9GTOVRCyhDTP5wnBVYr6YXn+o96Cx85Uakp08nqCpQqUerGss -XW8o0aaWSjv3M1HVf4ceMgtAqWDCcpUuCrO+l3USFztsy/Y+yoP/kN0a4UCFn0Lf -ccrvQdHxJo9qfqGFSa1W7dlDYJKGDq2oSyREa5J00lmkroexhJUPIeY8PTKE/SC8 
-jaaoFBG4PPiOVZhEgO6KtjiDqukrBoEgvu891wU3i1vhVxnFmnkpEKlZlnxnIHsJ -eHXSwhHwzXrBVp7osbgV9SkuIasxGL8WEgsjQviBuopvNXMUBjP711K4EJ/T8NZl -u7tAW8SuqTPdlk3XdyarE9gnZceVtyj8j3RmLV4tW8jfskYouXHDsiosURbT4TDV -xXenZXNvKLOL3SjYwZeae1kRACLq31dcIbdF/l/W84c1vapME6CEBIyhGdhQG5HS -ftnJqB+0bTBbguKCnnFuNv/thnXqF1SmyKJ4TXg5FSOumkIPmqhaQIueZMDz3/kX -X+IBh0T4x7C8TdbPOa3i0JI047jr6ML8yXUt4vG0aGPFAOeuyunvbiFU+ARVaTJ4 -W33TcIoTXW+nphl89cPcum4pdslO6qO+kcBrsR1hI/7aPia3NdujIctFunil4Ryz -xl8o2tJIpYSoehnJly2/ZCN0sk7cxmQVrY/KizsQX6cGHhFDLhRV4lvJpYoCfnxo -EP3rmLWQvjuzxr4GG/TG++8eo2WSZ/r+d/qbNvK1lRHqKhWlgHWbw9xlQc8wg4zd -K9pp2vIU9j2/cE3v/v1VjuTzomNGOl3wZa6soI1qi0q1kGqr7UOitqrfMccrZhdX -neogbgnIIYXjJQQHW5uvLJDTg3BhCmaJCNyi73+8QL3jASJMGpQIY8pEd91IuDVs -QiSU1sxV/BTdULUR3Zy4h/nwC+wHUxhZbunGCanPE+a8ZM+KM1j0xSR7qUafGwF8 -V7OeUhKowoNpJJMCt2r32cFJLLGqicjGr8Ir8U/8VR/g4XiNHOgUlIVca8OmME/T -6T0LJ86f78uQ/cApB4IoW0XOP6bZ60aRRuN/Laeu7dLglXqiP2PNyMq1kpKoKOPi -GZOnwlPer0uRVL2f9EpAH+L6qKM4/h9FD/6bs1xdADJR3PaDndm3SjwRJuzTL7Nx -vSKV6EuzHc5JDGil9QA9DtmzhxlIYoleIE+5VJsn01Sv9FAlFSPo3r5+GTtAoBrg -6xrg0irx7n4eTY52L6cK0Ml8HcS5ePCGNBbKF1w3bBpK8GoOrBevtW1mfTKgyaHU -7FLVf2z2ei5fFhlYdMyX1iUoOv97m6WBN9HMmYXmmKkFatN3xfMXFNYW24dNe8lT -oUojB+mJDrOFD6NJqR+LcHIRB25XTY4oEUC8navhEafcEQXaodEdfp4tjxBMVV/8 -UUuTjkIJQc9wB4ndIErP9jwGzfzX7NZaWfKYN5oLD0E52U16l9qlSKTJU0JP8XYD -nLxKZoLUP3KJE4a8mJOp9INOVWqdJNNfToD/gBMtcCg0HwFk5VIqgy2Q5QsaNE/k -vaCa+MORH+jPFJSRseKg7qvVGlieinsxT+ilNwtbmPjp+3uzoD0BIad4TJ1kKcab -5eXW2K7tb8IjRgq6l0A3GGlu3aIXE6IQiM5OdDZwiUPPp/X77mfK8KxcDkZTU+W/ -zwMwKisnVaG5jPC9TMAHnZapj9U8y5lsXaOgH7fY6Ov1VBwelyiL8qvVeNOo5HR6 -ag8l5qP7LOGRkoPESNftCfiJxBS17qN4nTCepp4Ku2w0VLrz9N9mEz2Ul2am4LoA -X5EjmqZnxwiLRtOKhEjnf382VXmb71QRLB59jKhmbBmOq/DZ5EEJyOZRIHDZ6f50 -JvTm1NJZNQ66ZE2elUrFbgYqArCzimKvj+INmOz7CICotSL+6xPPLwq+69w+mw5Y -jfNJ0T74W6q8yZcAooSlybxwOjCd3RmaV8Qo4eBc/ew3UYPP1Kd96C9qxsqTPSvi -iNZucGvTOQV70p3vSSzCv0Doyi+mrTcrE9UITD01urx5zWPrvn9a8hhvtYtLYkOs -5kWGqgWLu/pvT+dD5jKU0yh6az90j/b/g7fjk1vVO2XHaTYs7gO60hSUWfPKC6W6 
-cqeywNPcnv+bYZtoUwpdOLI4S7UwLTOrFxF3qjr0CV8vSzk0PuBIrTr0Xpwx1Q9z -NyJNInrZAEXiabxqruFHziylACSXsNXuCFo4FtpZkpmx4sABfNn1BvwF6lemy6iV -sH0I9mXtsao7NodERGrHSGjyDaohFlgmUXrIzypDcnGc32XoE7+AvYAiYgAOdzX/ -7rRzTDTlTv5ss9fHs2jtKSPERNx124Mj2K9VX7E6KGuy193cZyzhAEmrYKOaKOY1 -5jyX2hiQleB1xjOWg4cNhZTdEoGP4DTcQ13Egx6khUBALTTJhcJU/FX0JQJb7kCp -7/A83zaNAkcHzc4FdnhF08VECQRn/ZUvU2e342AsmxZv9opI7CSD9UjfCXYvfXnN -od7wrLGsBUChrVHLDGst22GM3vIFwC6gAKvCYrf3WL8wFO3KJGXT8AyGHijV5OyG -X7OeFHJCWoKU63G24FGHm4rKRW9S0mn1sxYVfhf5THjJXs8RpsUsH3+G6d3paSb7 -v58VAEkW2p2dyLA2cjqHLaaGJBgYuqyb/hP2kVu+lUMnzcyabncrcq0g0anvrA4F -R+WdNtV6KvWYToiOBQcpl0176f/11dGU/sWtGYkbYF8JXhf0GUVadM4VAC6BCWtG -fC4TTN0FuL1ZJjRVf3mLGOTQfPzag8Y0b2JxvdQo5dJ5BSZfB42LFj2VEQhKcAM6 -rj9/rJj0TcsG2B8wSrJn8KpvyIXTk8CluxMRSZszYOF44EitBv/gldotIS2uTWNN -8t1dQvHGoV/71U1QfzQW1ovg5B4HGIg3z21ju45bbQYvR2Ay8//vjHQmfvyE/PKN -a2JnX9tf6ElaAkr6/mrvskafHTktE+ttKQwBkwgfFUu5hX74wcKY1/JCq9wOujA5 -HP9xsGpKB/U+M2MHWuCqhCx7hBYeb/9/iJuFLFAPJfr/X3cJq7LRqPNmAJRQd5nf -yZod1fQ4AS0VM2bWeN1+pqDuZlGu5bOJ3S+9/B1AvE+QgAXWWNf6dwS2KvbiHdih -y0EYjldzN+fY1UMt+z7djTq51sdBg/1dS4xxsu8Bxdud1yf0su1TsENfkDI3SsP+ -V8QG0/FjQcA8ajCJSLNo72SJ/vH6QxtQ5fcIbb26vdAPY9ar5/Db2nypeGABpZnF -BNyBhqj95iO3JZf1fbsBdqFPK56NNDVuWqLMSFi9nYBuEeetnY82FNTvlae/Otos -qx5LN0rKoD4q/nLS01lrCOJRv43g3vdPZA3DvQaGO5rt4bkFibWf8bZ2yetHhIIa -dXKfi9tRkTXzYFZlT44DSf439CrVrOf6B8wuy4K2xHWft1jIMVnRuXBjY9fynVUd -ulMMPTi9WGvrwSPgGRlql4i7wXQv/efCrZNgQJnxmNOHofvs6ju8VKRh9jiTBOrd -ei3bOspnIS/kuCylxQUgJe8u+JeRJu+YahVShoCrW2MtiDFP/tYxOZmKVYf1/k4j -aHpulKoukPlr1QlnY8CDaEDXk9S+ZnPjxLjbvgBQaIVrxJj1OQteJK6S04fOjF2j -epyEJJmoFqL1FVVhOKhVBh0M/hq18CN9rCicpVfUVjIRe8zMf97HcfVeSUQ9G9RA -2TVl5qCL16B2SS4LD+41+6kHiKkuiHqXRKekUqDQCFOp1J10thiZWF15EA6ms+39 -ab54P+ZmfzNERyvzTTc8bgEhiepB1y0YrgG6U6MuaYpp+Jq9TPwkAVqkaJbe3vtO -21gUdQFW3mzUlGhWnCDJShKiCPwtF6zYfwcG4yERxoNUYCm+JYnRgRIpMQXyARyk -inbmp1mc+DJY1CrfQu02mrcvRDopfndyxruIt/RKdTpFqh8VFp8H0fCQWJdfy4Gr -zSx8rRZBxL7I9w7tWXXWuRcaZGAUEAFZYM4PN1aAcpwXt3rePli5Wr9Lg/5dFLyk 
-fbhaswP2i/x3HZlOZB5xb25mW+c7PeSGbx8A0zrGS+/oRy7Zy4ONZ2cNy2PEg0x+ -CimyDQqdrnkA8r4EYndlprsKhq9r5VSuzgfZwTYHqAzPkWLDGbBH1NC2vBM0Jwz7 -5y4dctP39I7s2K+E31+xVTOlpY7xhhvXjERmjIOKQ+dCNP87ay3fxd4pmM73i7L5 -qFkvNdgDe3bIfOsoiXFpM462cAyYiX9otH+eyvE3O5CTbYO3jLA7DhnYttGB2XRz -Povj8ODHY6F8P8HNVPU0wl18f/cqvdkhrndOfAocOdPKTUgmw1OW6r8gw/AeAoY9 -q49plnYTPKRNyk0u+kKmcudzMpzvzbE+F4t4iTYWI3NzdUNdljPcpKz6PegYw9yU -DtXYS85a/PSLfaAuoWHh7HkLL34Oles2a/7afyKRaLdefoMlTfMGAJn6smy3wVTK -APr5Rl2PZkhVDEeslB/vAd7We7oYo7JVyrpT2NOT9kYxCmvONnFu1jDJ3x3vg12I -idfOnfNTmhxab1Zsil7aIoqWgE4UQh5CsbChzGDcMZCGFbnP3hxRVPxFR6GCfSVP -Ja6W0ZWZCK1cXkayz9PywlQ5JdJKmjdH1qHci8arBPp89OTL9CL9k+bOPTvp5UvD -3KMEaxoumwsrCw92OO3nptHLXv2mcaFGC/Y1YEvvv0Bpb4lD56Kv/6Uizv5tud2E -0WWY63fJ+31C9pweMAZVMEHZwSI1iq5fgNfFGMnS1h8dxJLolozEgHZY8lTaL4fx -BazgRGbsEX/Qb93Ld91ZXXXLgMcBNN5shsa7K/IsmMUGWtiUzlUi4tBIit5QmOvj -ICQ7UxkzLE/LGat/8tk8823NSrLHMTetjhpHY8PTQf497E+rAnIvcHlZok3HTiHX -YMxGSJxOJI2ff0x3byiwZPf/dgZwK3/LDn5ck4LZx1qXdY6/3Dg6vFxCP4avJdnj -rpJk5BstSwca7l8d7PwfAVz9tKYxzhSBDcnk06o728KNfdYALy6h05jSbxq3d6Gm -0+Mh+ydr9E9uRT913TOVcsyXVwRrIHqBOJfxlyG/0HYKpN36WfXMDPeDZAzZ6Xbq -7SPjjVhEyPCK/gCOHDriBppWHDn/GhGwS8Jv+fWiSyLmqSSXtgO1oxrGo19ugV1S -K3or6AbePviDUkk/1NRDokMyHeE7TWZmzllmiZ2d2rwI+/+l43zWitStRW7xH9Nd -YFo343oWHkX/se1jB3EKHmSmDmLa4etmKXN1oIxzl7lRedRMfIb8RtqZ2lX0pcw8 -UwRcVAe6tMqRvUNMArZO55AUJ/3PyFL0m/OaxUqDt8JzsM2i4V4qfnoELo8vGmzq -UWYyDlcpPBznvEPRGa6mDq5359VIFAQGlySpXmFIwoTH3EkirY3j7DnmFC1d5kvM -+JggJElqkY/QUM0BrXrmUEzKH2AxjfffUKJNBrufTnkzyJLxasUbBO5f5wiIMLXU -IlEZaepep5tcgIkQsL5Kff9p5vIL2IrikhmdYCX9Npy8g3Ks+18mPXGb5wG2rgts -ZzDg7y0tyjMeLM7YZzHDjMD4qqIOmirEgfHxEGgLYn+fW4N1JiftzmU8dH9Z/Edr -xnNT+uxlNboT3e/QMD00/xDsI4vl8Wknk3YQEGvD6a89sBUBkEg4sAUMA+WZfaUP -Z/D8M9vhDsR1V70LtOWA6fld07az6JBer+A8M1Wq55hjVrR47DNa4xy3CeTxqLji -Yk3Dp4KAWJMAR14i2aLWTSshwkdHg8Sy1adCvrR/NdwOiBQq3uUAuA8jiCvSxQTu -LVU/atAAtOFRIGWitiDDn5l2Zyqqeo4dgHlB51cNvv0xRc0csi/ijddguEX+Ok10 -xbhylmCKDh3h7Lh1fA7kTdfW6joTSnIvJHJR4BRU7+41bLkxWZfLpkRivvFGKFm/ 
-lBx2HPZo9DAa5owqkNb0NzINWwSlgR3NQ8wNGmjHsrKiHGzi3vgNlYvlX653X71l -2n/5iDmmPR0uxJJm2nux2BhzYHr8rT09FLJOElSJLv5yDKkrgkNm9HNdFB6ZDsEw -as3rz31RjCnjp6SRUcw/+cjVrPklN/CFH3t44OAUnF+zGCrMAZ2Gvu7pStnYEf4q -cpLyw/KxCJSwOrwp4+Oe08y+6YH8ja+HXzUlsIrtJRaBdKnXbzUNX43Hz/0Ku2ov -D71ID06Oj26FWpE0EzfAKzPXxwKOMOK0I588v1EzBxMPMAkX5vKNv1ibs0V1whYl -ZdQd4Cyudq0NPSzr9TGRlCd6i4YsNqfkvXqPjwuXTNxzLZTxhUFIrkOwFKZ3UzeD -dLaksE4Aknpe699BHwfbDDk5Sb5tXl0VHZvSoCSckLHryULXysINJKEWbPDiap1S -nOxpJUTd5FI6bOVM1UzHtg4E+M7n1+zCIjKRi+1JFKTzvzKJEWyhIwaSdzyCScI3 -Pl7DsleNI9cnAObdHSg/kZpqJyO1NUgHm3X1KXoI3A/NtmCOfIcg1vceur5G0ZiY -SWEYvKgqNqZ+FpxgQuTt1hXkyaLyvFs1k54MTurPE3ht3oZP/FvDP0YS9W7U3yIB -5CrRWMOKq8j91ollzBwCPuGQ7+TSSHVVJafkYsAQdVe5y5rdMcWfHLN3U9hDQnlJ -jSii/4+AtuUR95BVojtIaw/FcfH+LS1Wnersy3SGNJ2j0wMSwy9oqFAMdWYFGjZU -iMNMt3BFxAaQwLOz+WWAFh6PMissdM5B5OLzYxZ7gQ+0ohYp7pO+snwfQIQHjzJM -CC33CqOjiB1bJP0tCnPUCidXwuqHn78o8hzesexx9HRbtScdZehj5R0ccyq5yY4C -qAMj6mTvrQ2/EKalfMnFS+UmGyD9W+ZkgMF62HIh+0x2Kce4e32mkWji9MRFBtL9 -Yhn6qMciA8noDdaQb/lnDgI/kBMNUSFsQcCynkHffvRWumaSm2+e55Mga8LpLpMb -47JcYBhcNn02S8znhP15z3b96SkRh7xHAGc5ALpIy/k8GNjr/b/bWACy2npliLQz -GFqwMhRSNmiKZ4v1CPaGE+2/Dy5DWpq/7sobQFNCOnnDO/BYX5/vZYzLZF37chSd -FaPZa/0pXiGr3z3O7xZGx9rLon2TgITzoHIxw/vSNt5saI+/iyeGfmN7EnfTzNfd -Iy9DTS5FqVddAdFtPc407zb8AsuRVW5Hw38Fri4B1Vu/JflJPfIGr2HIgll4wsFN -JKtzYcx34/Xldak4wNfPIlj/UoQ3zFjj5Ov/01MDO3cafvoL5l313W/g2v9k+J5k -iLA8gu693kAqH6zL3Zn0jCS7aIoTdN8P49GHs5xGAMR3n65Kw/Ow+9v8KrQ6JRk3 -ugMqqg6wsC07SiJ+zJOsN2HnYCX4xhI8RnDixzdYxWx7kmtMbgLedzhafgMoHx+v -E3vWnDegioTGMuoIjFGpxu/3PpL+tkHypx8AWz0PLumYsj+8KlV4haNBu6v6w1Q0 -rtkZ3NE6ywu82mVrMiD+wUOx5F0cqBp8IUOzMdUmJmz+NQxepBSXLJySFqHOBUGh -yRvVLtVefdg3UqyW2oiL3jNRUksZmZDEcM9djhWNJE7wmbIAoE/gH5fWulavMvp+ -3MVdS0KXmQGXiXqK4VF4yspoSdPsmG8VnZO3YRH+FEJPzjV8oN0LaAMAHNrrw+YN -4j/V4pJkVSdEYVyMJq1w+rICEds1KG6XGngryyh6OlR6kdQDzcUDm29IY8Ml603K -/LeG1roUjniL62u1UeZngyZilY4bi1FETg7+ckCwfmAwLyH8SJFEvmpPK/H2NrF9 -w/AE+QHTL8BNDbM4NBgqmqfrKggFFf/eFE7AxrUceMZZXBfG9N8DfhOiyt70JjSj 
-9+UjBeRLuZ/JhjrKo01bFPjtbQFoy/2yo2IqYPYCo8G6VN2y/qQKHLs7IQ/zyShQ -XNhPezFO3P0wpw4QpDfkQJVyrCEzoEohmlCoiSelFgMhHywvFowLA3xHNM519O7i -ZROF16uDE3qcOcIPQA4Me4g5ZCM8aouWwRbV45zpRMV4gnMoCBp4VUIrnkXQmfHv -hlV5uZZE9PB5Ms6Xb9GPRbpFkTbFXaan2PoetESI+cfw3HtjSdUv2w== From 8d89965277d92f76ba1926238ca102f0a3ab6fa7 Mon Sep 17 00:00:00 2001 From: ddimatos Date: Sun, 26 Feb 2023 18:11:28 -0800 Subject: [PATCH 026/413] update profile created for mount points Signed-off-by: ddimatos --- scripts/profile-shr | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/scripts/profile-shr b/scripts/profile-shr index 22da6d2f7..c827f3037 100755 --- a/scripts/profile-shr +++ b/scripts/profile-shr @@ -25,26 +25,26 @@ bash_enabled=false ################################################################################ zoau_choice () { case "$1" in - [a]* ) zoau_version="/zoau/v1.2.0";; - [b]* ) zoau_version="/zoau/v1.0.0-ga";; - [c]* ) zoau_version="/zoau/v1.0.1-ga";; - [d]* ) zoau_version="/zoau/v1.0.1-ptf1";; - [e]* ) zoau_version="/zoau/v1.0.1-ptf2";; - [f]* ) zoau_version="/zoau/v1.0.2-ga";; - [g]* ) zoau_version="/zoau/v1.0.3-ga5";; - [h]* ) zoau_version="/zoau/v1.0.3-ptf2";; - [i]* ) zoau_version="/zoau/v1.1.0-spr";; - [j]* ) zoau_version="/zoau/v1.1.0-spr5";; - [k]* ) zoau_version="/zoau/v1.1.0-spr7";; - [l]* ) zoau_version="/zoau/v1.1.0-ga";; - [m]* ) zoau_version="/zoau/v1.1.1-ptf1";; - [n]* ) zoau_version="/zoau/v1.2.0f";; - [o]* ) zoau_version="/zoau/v1.2.1";; - [p]* ) zoau_version="/zoau/v1.2.1-rc1";; - [q]* ) zoau_version="/zoau/v1.2.1g";; - [r]* ) zoau_version="/zoau/v1.2.1h";; - [s]* ) zoau_version="/zoau/v1.2.2";; - [t]* ) zoau_version="/zoau/latest";; + [a]* ) zoau_version="v1.2.0";; + [b]* ) zoau_version="v1.0.0-ga";; + [c]* ) zoau_version="v1.0.1-ga";; + [d]* ) zoau_version="v1.0.1-ptf1";; + [e]* ) zoau_version="v1.0.1-ptf2";; + [f]* ) zoau_version="v1.0.2-ga";; + [g]* ) zoau_version="v1.0.3-ga5";; + [h]* ) 
zoau_version="v1.0.3-ptf2";; + [i]* ) zoau_version="v1.1.0-spr";; + [j]* ) zoau_version="v1.1.0-spr5";; + [k]* ) zoau_version="v1.1.0-spr7";; + [l]* ) zoau_version="v1.1.0-ga";; + [m]* ) zoau_version="v1.1.1-ptf1";; + [n]* ) zoau_version="v1.2.0f";; + [o]* ) zoau_version="v1.2.1";; + [p]* ) zoau_version="v1.2.1-rc1";; + [q]* ) zoau_version="v1.2.1g";; + [r]* ) zoau_version="v1.2.1h";; + [s]* ) zoau_version="v1.2.2";; + [t]* ) zoau_version="latest";; * ) echo "" usage ;; From f04e2c908b5af765180d8585966b0b7c693593e8 Mon Sep 17 00:00:00 2001 From: Demetri Date: Mon, 27 Feb 2023 10:23:49 -0800 Subject: [PATCH 027/413] Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos * Update changelog fragments Signed-off-by: ddimatos --------- Signed-off-by: ddimatos --- .../588-zos_copy-emergency-backup.yml | 5 ----- .../588-zos_copy-emergenxy-backup.yml | 6 ------ .../fragments/599-copy-carriage-return.yml | 4 ---- .../600-zos_copy-special-characters.yml | 4 ---- .../fragments/601-copy-loadlib-member.yml | 3 --- .../fragments/602-text-converter-import.yml | 6 ++++++ changelogs/fragments/627-all-modules.yml | 19 ------------------- .../647-zos_data_set_record_format.yml | 6 ------ .../fragments/648-zos_operator-examples.yml | 4 ---- .../fragments/650-doc-meta-data-updates.yml | 11 ----------- .../fragments/659-zos-lineinfile-f-string.yml | 8 ++++++++ .../enhancement-518-text-converter-import.yml | 3 --- plugins/modules/zos_lineinfile.py | 2 +- 13 files changed, 15 insertions(+), 66 deletions(-) delete mode 100644 changelogs/fragments/588-zos_copy-emergency-backup.yml delete mode 100644 changelogs/fragments/588-zos_copy-emergenxy-backup.yml delete mode 100644 changelogs/fragments/599-copy-carriage-return.yml delete mode 100644 changelogs/fragments/600-zos_copy-special-characters.yml delete mode 100644 changelogs/fragments/601-copy-loadlib-member.yml create mode 100644 
changelogs/fragments/602-text-converter-import.yml delete mode 100644 changelogs/fragments/627-all-modules.yml delete mode 100644 changelogs/fragments/647-zos_data_set_record_format.yml delete mode 100644 changelogs/fragments/648-zos_operator-examples.yml delete mode 100644 changelogs/fragments/650-doc-meta-data-updates.yml create mode 100644 changelogs/fragments/659-zos-lineinfile-f-string.yml delete mode 100644 changelogs/fragments/enhancement-518-text-converter-import.yml diff --git a/changelogs/fragments/588-zos_copy-emergency-backup.yml b/changelogs/fragments/588-zos_copy-emergency-backup.yml deleted file mode 100644 index 393a0f50d..000000000 --- a/changelogs/fragments/588-zos_copy-emergency-backup.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system - to its initial state in case of a module failure only when force is false. - (https://github.com/ansible-collections/ibm_zos_core/pull/590) diff --git a/changelogs/fragments/588-zos_copy-emergenxy-backup.yml b/changelogs/fragments/588-zos_copy-emergenxy-backup.yml deleted file mode 100644 index 752131ddc..000000000 --- a/changelogs/fragments/588-zos_copy-emergenxy-backup.yml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: - - zos_copy - fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system - to its initial state in case of a module failure only when force is false. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/590) - diff --git a/changelogs/fragments/599-copy-carriage-return.yml b/changelogs/fragments/599-copy-carriage-return.yml deleted file mode 100644 index 6e61ded4a..000000000 --- a/changelogs/fragments/599-copy-carriage-return.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_copy - fixes a bug where the computed record length for a new destination - dataset would include newline characters. - (https://github.com/ansible-collections/ibm_zos_core/pull/620) diff --git a/changelogs/fragments/600-zos_copy-special-characters.yml b/changelogs/fragments/600-zos_copy-special-characters.yml deleted file mode 100644 index 3eb9c4247..000000000 --- a/changelogs/fragments/600-zos_copy-special-characters.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_copy - fixes a bug where if a destination has accented characters in - its content, the module would fail when trying to determine if it is empty. - (https://github.com/ansible-collections/ibm_zos_core/pull/634) diff --git a/changelogs/fragments/601-copy-loadlib-member.yml b/changelogs/fragments/601-copy-loadlib-member.yml deleted file mode 100644 index bd704d41d..000000000 --- a/changelogs/fragments/601-copy-loadlib-member.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: -- zos_copy - fixes a bug where copying a member from a loadlib to another - loadlib fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) \ No newline at end of file diff --git a/changelogs/fragments/602-text-converter-import.yml b/changelogs/fragments/602-text-converter-import.yml new file mode 100644 index 000000000..24f719c26 --- /dev/null +++ b/changelogs/fragments/602-text-converter-import.yml @@ -0,0 +1,6 @@ +minor_changes: +- Updated the text converter import from "from ansible.module_utils._text" + to "from ansible.module_utils.common.text.converters" to remove + warning".. warn Use ansible.module_utils.common.text.converters instead.". 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/602) + diff --git a/changelogs/fragments/627-all-modules.yml b/changelogs/fragments/627-all-modules.yml deleted file mode 100644 index 9d7cec183..000000000 --- a/changelogs/fragments/627-all-modules.yml +++ /dev/null @@ -1,19 +0,0 @@ -trivial: - - Update documentation to use link L(...) over M(...) meta. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update modules such doc defaults match module defaults. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Initialize variables to meet linting requirements. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Remove unused global vars. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update Makefile tooling. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Remove requirements.txt because it is maintained in the pipeline. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Complete Ansible certification for versions 2.12, 2.13, 2.14 and 2.15. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update and add certification files ignore.txt. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update test cases with added checks. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) \ No newline at end of file diff --git a/changelogs/fragments/647-zos_data_set_record_format.yml b/changelogs/fragments/647-zos_data_set_record_format.yml deleted file mode 100644 index 1f26a0f5d..000000000 --- a/changelogs/fragments/647-zos_data_set_record_format.yml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: -- zos_data_set - fixes a bug where the default record format FB was actually - never enforced and when enforced it would cause VSAM creation to fail with a - Dynalloc failure. This also cleans up some of the options that are set by - default when they have no bearing for batch. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/647) diff --git a/changelogs/fragments/648-zos_operator-examples.yml b/changelogs/fragments/648-zos_operator-examples.yml deleted file mode 100644 index bb6e4d29a..000000000 --- a/changelogs/fragments/648-zos_operator-examples.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_operator - fixed incorrect example descriptions and updated the doc to - highlight the deprecated option `wait`. - (https://github.com/ansible-collections/ibm_zos_core/pull/648) diff --git a/changelogs/fragments/650-doc-meta-data-updates.yml b/changelogs/fragments/650-doc-meta-data-updates.yml deleted file mode 100644 index 46827405a..000000000 --- a/changelogs/fragments/650-doc-meta-data-updates.yml +++ /dev/null @@ -1,11 +0,0 @@ -trivial: - - Update meta/* files to reflect release content and version, issue 433. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - Update community docs with Ansible version support statement, issue 630. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - Update documentation to align to corporate wording, issue 649 - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - zos_job_output - Update documentation to remove unicode text in doc, issue 651. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - zos_operator - update documentation and examples, issue 390. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) \ No newline at end of file diff --git a/changelogs/fragments/659-zos-lineinfile-f-string.yml b/changelogs/fragments/659-zos-lineinfile-f-string.yml new file mode 100644 index 000000000..bd5e0b269 --- /dev/null +++ b/changelogs/fragments/659-zos-lineinfile-f-string.yml @@ -0,0 +1,8 @@ +bugfixes: +- zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed + to ensure support for Python 2.7 on the controller. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/659) +trivial: +- Remove changelog fragments no longer needed as they are already recorded in + the prior version of IBM z/OS Core. + (https://github.com/ansible-collections/ibm_zos_core/pull/659) \ No newline at end of file diff --git a/changelogs/fragments/enhancement-518-text-converter-import.yml b/changelogs/fragments/enhancement-518-text-converter-import.yml deleted file mode 100644 index 691a57273..000000000 --- a/changelogs/fragments/enhancement-518-text-converter-import.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning ".. warn:: Use ansible.module_utils.common.text.converters instead.". - diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index e72bfc6b1..7a26ce299 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -424,7 +424,7 @@ def main(): # Check if dest/src exists if not ds_utils.exists(): - module.fail_json(msg=f"{src} does not exist") + module.fail_json(msg="{0} does not exist".format(src)) file_type = ds_utils.ds_type() if file_type == 'USS': From 6b54f3e07fd0bad34205c3f1a09d45c1e98e05d7 Mon Sep 17 00:00:00 2001 From: ddimatos Date: Sun, 12 Mar 2023 23:58:53 -0700 Subject: [PATCH 028/413] Updated shell scripts for development tooling Signed-off-by: ddimatos --- scripts/mounts.sh | 70 +++++++++++ scripts/profile.sh | 287 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 357 insertions(+) create mode 100644 scripts/mounts.sh create mode 100755 scripts/profile.sh diff --git a/scripts/mounts.sh b/scripts/mounts.sh new file mode 100644 index 000000000..0fcfecb38 --- /dev/null +++ b/scripts/mounts.sh @@ -0,0 +1,70 @@ + # ============================================================================== + # Copyright (c) IBM Corporation 2023 + # Licensed under the Apache 
License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # http://www.apache.org/licenses/LICENSE-2.0 + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # ============================================================================== + + # ============================================================================== + # KSH (Korn Shell) Array of mounts index delimited by " ", etries delimited by ":" + # More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm + # This `mounts.sh` is sourced by serveral other files, only these lists needs to + # be maintained. + # ============================================================================== + + # ------------------------------------------------------------------------------ + # zoau_mount_list[0]=":::" + # e.g: zoau_mount_list[0]="1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" + # Format: + # index - used by the generated profile so a user can select an option + # version - describes the option a user can select + # mount - the mount point path the data set will be mounted to + # data_set - the z/OS data set containing the binaries to mount + # ------------------------------------------------------------------------------ + set -A zoau_mount_list "1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ + "2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ + "3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ + "4:1.0.1-ptf1:/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ + "5:1.0.1-ptf2:/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ + "6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ + 
"7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ + "8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ + "9:1.1.0-spr:/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ + "10:1.1.0-spr5:/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ + "11:1.1.0-spr7:/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ + "12:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ + "13:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ + "14:1.2.0f:/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ + "15:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ + "16:1.2.1-rc1:/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ + "17:1.2.1g:/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ + "18:1.2.1h:/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ + "19:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ + "20:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" + + # ------------------------------------------------------------------------------ + # python_mount_list[0]=":" + # python_mount_list[0]="/python2:IMSTESTU.PYZ.ROCKET.V362B.ZFS" + # ------------------------------------------------------------------------------ + set -A python_mount_list "/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ + "/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ + "/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ + "/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ + "/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ + "/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" + + # ------------------------------------------------------------------------------ + # python_path_list[0]="::" + # python_path_list[0]="1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" + # ------------------------------------------------------------------------------ + set -A python_path_list "1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" \ + "2:3.9:/python2/usr/lpp/IBM/cyp/v3r9/pyz" \ + "3:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz" \ + "4:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz" + diff --git a/scripts/profile.sh b/scripts/profile.sh new file mode 100755 index 000000000..4a10fd3bd --- /dev/null +++ 
b/scripts/profile.sh @@ -0,0 +1,287 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ------------------------------------------------------------------------------ +# If the current shell is bash, exit it because the ported rocket shell misbaves +# when VI'ing scripts and this script is specifically written to Korn Shell (ksh) +# ------------------------------------------------------------------------------ +CURR_SHELL=`echo $0` + +if [ "$CURR_SHELL" = "bash" ]; then + # Have not found a good way to exit the bash shell without ending the profile + echo "This script can not run in a bash emulator, exiting bash and and thus"\ + "you must exit this profile again." + exit 1 +fi + +# ------------------------------------------------------------------------------ +# Source the known mount points +# ------------------------------------------------------------------------------ +. ./mounts.sh + +################################################################################ +# Global vars - since ksh is the default shell and local ksh vars are defined +# with `typeset`, e.g. `typeset var foo`, I don't want to script this solely for +# ksh given there are othe ported shells for z/OS. 
+################################################################################ +ZOAU_INDEX="" +ZOAU_VERSION="" +ZOAU_MOUNT="" +ZOAU_DATA_SET="" + +PYTHON_INDEX="" +PYTHON_VERSION="" +PYTHON_PATH="" + +BASH_SELECTED=false + +# ****************************************************************************** +# Search the array `zoau_mount_list` for a matching arg, if it matches set the +# global zoau_version var to the zoau version. +# ****************************************************************************** + +get_option_zoau(){ + + arg=$1 + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + for tgt in "${zoau_mount_list[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + if [ "$zoau_index" = "$arg" ]; then + ZOAU_INDEX="$zoau_index" + ZOAU_VERSION="$zoau_version" + ZOAU_MOUNT="$zoau_mount" + ZOAU_DATA_SET="$zoau_data_set" + fi + done +} + +get_option_python(){ + + arg=$1 + unset python_index + unset python_version + unset python_path + for tgt in "${python_path_list[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_path=`echo "${tgt}" | cut -d ":" -f 3` + + if [ "$python_index" = "$arg" ]; then + PYTHON_INDEX="$python_index" + PYTHON_VERSION="$python_version" + PYTHON_PATH="$python_path" + fi + done +} + +get_option_shell(){ + + arg=$1 + case "$1" in + [yY][eE][sS]|[yY]* ) BASH_SELECTED=true;; + esac +} + +################################################################################ +# User input for Python +################################################################################ +help_option_zoau(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + echo "" + echo "ZOAU Options:" + for tgt in "${zoau_mount_list[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` 
+ zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + echo "\t[${zoau_index}] - ZOAU ${zoau_version}" + done +} + +help_option_python(){ + unset python_index + unset python_version + unset python_path + echo "Python Options:" + for tgt in "${python_path_list[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_path=`echo "${tgt}" | cut -d ":" -f 3` + echo "\t[${python_index}] - Python ${python_version}" + done +} + +help_option_shell(){ + echo "Bash shell:" + echo "\t[Y/N] - Default no." +} + +usage () { + echo "" + echo "Usage: $0 [1-n] [1-n] Y/N" + echo "Example: $0 12 1 Y" + echo "Default: $0 19 2 N" + help_option_zoau + help_option_python + help_option_shell +} + +################################################################################ +# Message to user +################################################################################ +selected_option () { + echo "Using ZOAU version `zoaversion`" + echo "Using python version `python --version`" + if [ "${BASH_SELECTED}" = true ]; then + echo "Bash is enabled." + fi +} + +################################################################################ +# Configure all exports +################################################################################ +set_exports (){ + + export PATH=/bin:. 
+ + ################################################################################ + # Set the ported tools directory on the EC, see the tools you can use, eg: + # vim, bash, etc + ################################################################################ + export TOOLS_DIR=/usr/lpp/rsusr/ported + export PATH=$PATH:$TOOLS_DIR/bin + + ################################################################################ + # Set the editor to VI + ################################################################################ + export TERM=xterm + + ################################################################################ + # Standard exports used in EBCDIC/ASCII conversion needed by tools like pyz/zoau + ################################################################################ + export _BPXK_AUTOCVT='ON' + export _CEE_RUNOPTS='FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)' + export _TAG_REDIR_ERR=txt + export _TAG_REDIR_IN=txt + export _TAG_REDIR_OUT=txt + export LANG=C + + ################################################################################ + # Set Java + ################################################################################ + export JAVA_HOME=/usr/lpp/java170/J7.0 + + ################################################################################ + # Configure Python + ################################################################################ + export PYTHON_HOME=$PYTHON_PATH + export PYTHON=$PYTHON_HOME/bin + export LIBPATH=$PYTHON_HOME/lib:$LIBPATH + + ################################################################################ + # ZOAU 1.0.2 or earlier uses ZOAU_ROOT and not ZOAU_HOME + ################################################################################ + export ZOAU_HOME=${ZOAU_MOUNT} + export PATH=$ZOAU_HOME/bin:$PATH:$PYTHON:$JAVA_HOME/bin:$TOOLS_DIR/bin + export MANPATH=$MANPATH:$TOOLS_DIR/man + export ZOAU_ROOT=${ZOAU_HOME} + export PYTHONPATH=${ZOAU_HOME}/lib/:${PYTHONPATH} + export 
LIBPATH=${ZOAU_HOME}/lib:${LIBPATH} + + ################################################################################ + # Custom terminal configurations + ################################################################################ + # Append home directory to the current path + export PATH=$PATH:$HOME: + + # Set the prompt to display your login name & current directory + export PS1='[ $LOGNAME':'$PWD':' ]' + + alias python="python3" + alias pip="pip3" +} + +set_bash (){ + ################################################################################ + # Run bash shell: + # I have seen many issues using this version of bash to edit files on the + # EC, for example if you edit your .profile with VI under BASH, it will render + # unreadable; at times when I have to edit, I type exit and it defaults back into + # the zos_ssh shell which does not have any issues with VI or editing files. + # I generally use bash only for history and running commands. + ################################################################################ + if [ "${BASH_SELECTED}" = true ]; then + bash; + fi +} +################################################################################ +# Main +################################################################################ +# User enters choices for zoau, python and bash +if [ $# -eq 3 ];then + get_option_zoau $1 + get_option_python $2 + get_option_shell $3 + set_exports + selected_option + set_bash +# User enters choices for zoau and python, bash defaults to false +elif [ $# -eq 2 ];then + get_option_zoau $1 + get_option_python $2 + get_option_shell false + set_exports + selected_option + set_bash +# Default zoau 1.2.2 and python 3.9 +elif [ $# -eq 0 ]; then + get_option_zoau 19 + get_option_python 2 + get_option_shell false + set_exports + selected_option + set_bash +elif [ "$1" = help ]; then + usage +else + usage +fi + + +# Source should have array mount_list +xxxx(){ + unset index + unset name + unset mount_point + unset 
data_set + for tgt in "${zoau_mount_list[@]}" ; do + index=`echo "${tgt}" | cut -d ":" -f 1` + name=`echo "${tgt}" | cut -d ":" -f 2` + mount_point=`echo "${tgt}" | cut -d ":" -f 3` + data_set=`echo "${tgt}" | cut -d ":" -f 4` + mkdir -p ${mount_point} + echo "Mouting ZOAU ${name} on data set ${data_set} to path ${mount_point}." + /usr/sbin/mount -r -t zfs -f ${data_set} ${mount_point} + done +} \ No newline at end of file From ab645ffe8697f320c14862ee0506f845c27c96c0 Mon Sep 17 00:00:00 2001 From: ddimatos Date: Thu, 16 Mar 2023 13:37:34 -0700 Subject: [PATCH 029/413] Add issue template updates Signed-off-by: ddimatos --- .github/ISSUE_TEMPLATE/bug_issue.yml | 66 ++++++++++++----- .../ISSUE_TEMPLATE/collaboration_issue.yml | 71 +++++++++++++++++++ .github/ISSUE_TEMPLATE/enabler_issue.yml | 23 +++--- 3 files changed, 134 insertions(+), 26 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/collaboration_issue.yml diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 85743b84b..2050bd3fc 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -8,8 +8,53 @@ body: - type: markdown attributes: value: | - Before reporting a bug, please review existing isssues to avoid duplication. + Before reporting a bug, please review existing issues to avoid duplication. + Issues can only be opened on supported combinations. --- + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true + - type: checkboxes + attributes: + label: Have you reviewed the required dependencies? + description: Please review that the version of ZOAU and IBM Enterprise Python are supported in the **Reference** section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). 
+ options: + - label: The dependencies are supported. + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are using a supported version. + multiple: true + options: + - v1.0.3 + - v1.1.0 + - v1.1.1 + - v1.2.0 + - v1.2.1 + - v1.2.1.1 + - v1.2.2 + - v1.2.3 + validations: + required: true + - type: dropdown + id: python-version + attributes: + label: IBM Enterprise Python + description: Which version of IBM Enterprise Python are you using. Ensure you are using a supported version. + multiple: true + options: + - v3.8.x + - v3.9.x + - v3.10.x + - v3.11.x + validations: + required: true - type: textarea id: issue-description attributes: @@ -61,7 +106,7 @@ body: id: ansible-version attributes: label: Ansible version - description: What is the verson of Ansible on the controller. + description: What is the version of Ansible on the controller. placeholder: Paste verbatim output from `ansible --version`. render: shell validations: @@ -101,22 +146,7 @@ body: - v1.11.0 validations: required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.0 - - v1.1.1 - - v1.2.0 - - v1.2.1 - - v1.2.1.1 - - v1.2.2 - validations: - required: true + - type: input id: zos-version attributes: diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml new file mode 100644 index 000000000..3c46b8f81 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -0,0 +1,71 @@ +name: Collaboration task +description: Identify a collaboration between this development team and another party. 
e.g., a support case, dependency effort, etc. +title: "[Collaboration] " +labels: [Collaboration] +assignees: + - IBMAnsibleHelper +body: + - type: markdown + attributes: + value: | + Before authoring a task, please review existing issues to avoid duplication. + --- + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true + - type: checkboxes + id: support-issue + attributes: + label: Support and service? + description: Is support and service involved in this collaboration? + options: + - label: Yes, support and service is involved. + required: true + - type: textarea + id: issue-description + attributes: + label: Collaboration description + description: Describe the task, this is the equivalent of an agile story. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true + - type: textarea + id: collaborators + attributes: + label: Collaborators + description: Who or what product is part of this collaboration. + placeholder: GH IDs, product, etc. + validations: + required: true + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported for this task. You can select more than one. 
+ multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index 305c15b8f..acce5523e 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -1,5 +1,5 @@ name: Enabler task -description: Identify a development task that does not correspond to other git issue types, eg this could be an enabler task. +description: Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. title: "[Enabler] <title> " labels: [Enabler] assignees: @@ -8,15 +8,14 @@ body: - type: markdown attributes: value: | - Before authoring a task, please review existing isssues to avoid duplication. + Before authoring a task, please review existing issues to avoid duplication. --- - - type: textarea - id: issue-description + - type: checkboxes attributes: - label: Enabler description - description: Describe the task, this is the equivilant of a agile story. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. - validations: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. required: true - type: dropdown id: modules @@ -46,3 +45,11 @@ body: - zos_tso_command validations: required: false + - type: textarea + id: issue-description + attributes: + label: Enabler description + description: Describe the task, this is the equivalent of a agile story. 
+ placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true From 10215589f97af381ede53f9f4175774f84ae1433 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:48:32 -0700 Subject: [PATCH 030/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 2050bd3fc..c472423a2 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -13,12 +13,14 @@ body: --- - type: checkboxes attributes: + id: review-issues label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. options: - label: There are no existing issues. required: true - type: checkboxes + id: review-dependencies attributes: label: Have you reviewed the required dependencies? description: Please review that the version of ZOAU and IBM Enterprise Python are supported in the **Reference** section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). 
@@ -146,7 +148,6 @@ body: - v1.11.0 validations: required: true - - type: input id: zos-version attributes: From 39f6a0f2184764b157cdce91520ef634365bfa4c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:49:48 -0700 Subject: [PATCH 031/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 78 +++++----------------------- 1 file changed, 14 insertions(+), 64 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index c472423a2..5ad715b99 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -8,55 +8,8 @@ body: - type: markdown attributes: value: | - Before reporting a bug, please review existing issues to avoid duplication. - Issues can only be opened on supported combinations. + Before reporting a bug, please review existing isssues to avoid duplication. --- - - type: checkboxes - attributes: - id: review-issues - label: Is there an existing issue for this? - description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true - - type: checkboxes - id: review-dependencies - attributes: - label: Have you reviewed the required dependencies? - description: Please review that the version of ZOAU and IBM Enterprise Python are supported in the **Reference** section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). - options: - - label: The dependencies are supported. - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are using a supported version. 
- multiple: true - options: - - v1.0.3 - - v1.1.0 - - v1.1.1 - - v1.2.0 - - v1.2.1 - - v1.2.1.1 - - v1.2.2 - - v1.2.3 - validations: - required: true - - type: dropdown - id: python-version - attributes: - label: IBM Enterprise Python - description: Which version of IBM Enterprise Python are you using. Ensure you are using a supported version. - multiple: true - options: - - v3.8.x - - v3.9.x - - v3.10.x - - v3.11.x - validations: - required: true - type: textarea id: issue-description attributes: @@ -108,7 +61,7 @@ body: id: ansible-version attributes: label: Ansible version - description: What is the version of Ansible on the controller. + description: What is the verson of Ansible on the controller. placeholder: Paste verbatim output from `ansible --version`. render: shell validations: @@ -131,21 +84,18 @@ body: - v1.3.6 - v1.4.0-beta.1 - v1.4.0-beta.2 - - v1.4.0 - - v1.5.0-beta.1 - - v1.5.0 - - v1.6.0-beta.1 - - v1.6.0 - - v1.7.0-beta.1 - - v1.7.0 - - v1.8.0-beta.1 - - v1.8.0 - - v1.9.0-beta.1 - - v1.9.0 - - v1.10.0-beta.1 - - v1.10.0 - - v1.11.0-beta.1 - - v1.11.0 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. 
+ multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 validations: required: true - type: input From f3577cc019059b0ab162880398ed59056da600d1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:56:13 -0700 Subject: [PATCH 032/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 5ad715b99..074a9fe98 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -133,3 +133,11 @@ body: - zos_tso_command validations: required: false + - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](https://example.com) + options: + - label: I agree to follow this project's Code of Conduct + required: true \ No newline at end of file From 5fac429d114c5e5eebf7adf3b824682144ed22d1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:58:00 -0700 Subject: [PATCH 033/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 074a9fe98..0c79b74ca 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -136,8 +136,8 @@ body: - type: checkboxes id: terms attributes: - label: Code of Conduct - description: By submitting this issue, you agree to follow our [Code of Conduct](https://example.com) + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. options: - - label: I agree to follow this project's Code of Conduct + - label: There are no existing issues. 
required: true \ No newline at end of file From 9ae69316269a407b1b4565ec242d7c376cc8c96d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:58:45 -0700 Subject: [PATCH 034/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 0c79b74ca..98e7f0f91 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -10,6 +10,14 @@ body: value: | Before reporting a bug, please review existing isssues to avoid duplication. --- + - type: checkboxes + id: terms + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: textarea id: issue-description attributes: @@ -133,11 +141,3 @@ body: - zos_tso_command validations: required: false - - type: checkboxes - id: terms - attributes: - label: Is there an existing issue for this? - description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. 
- required: true \ No newline at end of file From 7208aeb352bd7ee9aea13375078fdcf416df1294 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:33:44 -0700 Subject: [PATCH 035/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 130 ++++++++++++++------------- 1 file changed, 70 insertions(+), 60 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 98e7f0f91..463a40a00 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -18,6 +18,74 @@ body: options: - label: There are no existing issues. required: true + - type: checkboxes + id: terms + attributes: + label: Are the dependencies a supported version? + description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + options: + - label: The dependencies are supported. + required: true + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: | + Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + multiple: true + options: + - v1.0.0 + - v1.1.0 + - v1.2.1 + - v1.3.0 + - v1.3.1 + - v1.3.3 + - v1.3.5 + - v1.3.6 + - v1.4.0-beta.1 + - v1.4.0-beta.2 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + validations: + required: true + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported in this bug. 
You can select more than one. + multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false - type: textarea id: issue-description attributes: @@ -74,38 +142,7 @@ body: render: shell validations: required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true - options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - validations: - required: true + - type: input id: zos-version attributes: @@ -113,31 +150,4 @@ body: description: What is the version of z/OS on the managed node. validations: required: false - - type: dropdown - id: modules - attributes: - label: Ansible module - description: Select which modules are being reported in this bug. You can select more than one. 
- multiple: true - options: - - zos_apf - - zos_backup_restore - - zos_blockinfile - - zos_copy - - zos_data_set - - zos_encode - - zos_fetch - - zos_find - - zos_gather_facts - - zos_job_output - - zos_job_query - - zos_job_submit - - zos_lineinfile - - zos_mount - - zos_mvs_raw - - zos_operator - - zos_operator_action_query - - zos_ping - - zos_tso_command - validations: - required: false + From dfab5e3a06d1439d3f251a74dffb275b4ffd7c0d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:34:29 -0700 Subject: [PATCH 036/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 58 ++++++++++++++-------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 463a40a00..fc60de354 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -58,34 +58,7 @@ body: - v1.2.0 validations: required: true - - type: dropdown - id: modules - attributes: - label: Ansible module - description: Select which modules are being reported in this bug. You can select more than one. - multiple: true - options: - - zos_apf - - zos_backup_restore - - zos_blockinfile - - zos_copy - - zos_data_set - - zos_encode - - zos_fetch - - zos_find - - zos_gather_facts - - zos_job_output - - zos_job_query - - zos_job_submit - - zos_lineinfile - - zos_mount - - zos_mvs_raw - - zos_operator - - zos_operator_action_query - - zos_ping - - zos_tso_command - validations: - required: false + - type: textarea id: issue-description attributes: @@ -150,4 +123,31 @@ body: description: What is the version of z/OS on the managed node. validations: required: false - + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported in this bug. You can select more than one. 
+ multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false From 36d29fbbe56f9d67545566aa39eca95a78f24f67 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:35:32 -0700 Subject: [PATCH 037/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 65 ++++++++++++++-------------- 1 file changed, 32 insertions(+), 33 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index fc60de354..3e0bdb4c9 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -26,38 +26,6 @@ body: options: - label: The dependencies are supported. required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true - options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. 
- multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - validations: - required: true - type: textarea id: issue-description @@ -115,7 +83,38 @@ body: render: shell validations: required: true - + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: | + Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + multiple: true + options: + - v1.0.0 + - v1.1.0 + - v1.2.1 + - v1.3.0 + - v1.3.1 + - v1.3.3 + - v1.3.5 + - v1.3.6 + - v1.4.0-beta.1 + - v1.4.0-beta.2 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + validations: + required: true - type: input id: zos-version attributes: From 362f742fb43d844edf2f29ccfd4ae1e406c33542 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:37:25 -0700 Subject: [PATCH 038/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 127 ++++++++++++++------------- 1 file changed, 64 insertions(+), 63 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 3e0bdb4c9..4e5a8b1e9 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -11,7 +11,7 @@ body: Before reporting a bug, please review existing isssues to avoid duplication. --- - type: checkboxes - id: terms + id: existing-issue attributes: label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. @@ -19,14 +19,73 @@ body: - label: There are no existing issues. 
required: true - type: checkboxes - id: terms + id: valid-dependencies attributes: label: Are the dependencies a supported version? description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). options: - label: The dependencies are supported. required: true - + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: | + Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + multiple: true + options: + - v1.0.0 + - v1.1.0 + - v1.2.1 + - v1.3.0 + - v1.3.1 + - v1.3.3 + - v1.3.5 + - v1.3.6 + - v1.4.0-beta.1 + - v1.4.0-beta.2 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + validations: + required: true + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported in this bug. You can select more than one. 
+ multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false - type: textarea id: issue-description attributes: @@ -83,38 +142,7 @@ body: render: shell validations: required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true - options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - validations: - required: true + - type: input id: zos-version attributes: @@ -122,31 +150,4 @@ body: description: What is the version of z/OS on the managed node. validations: required: false - - type: dropdown - id: modules - attributes: - label: Ansible module - description: Select which modules are being reported in this bug. You can select more than one. 
- multiple: true - options: - - zos_apf - - zos_backup_restore - - zos_blockinfile - - zos_copy - - zos_data_set - - zos_encode - - zos_fetch - - zos_find - - zos_gather_facts - - zos_job_output - - zos_job_query - - zos_job_submit - - zos_lineinfile - - zos_mount - - zos_mvs_raw - - zos_operator - - zos_operator_action_query - - zos_ping - - zos_tso_command - validations: - required: false + From c5743d692e2e4a0e7a7baf2bae51dde792d2c3e0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:52:44 -0700 Subject: [PATCH 039/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 48 ++++++++++++++++++---------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 4e5a8b1e9..8eaee478e 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -8,7 +8,7 @@ body: - type: markdown attributes: value: | - Before reporting a bug, please review existing isssues to avoid duplication. + Please complete all required fields. --- - type: checkboxes id: existing-issue @@ -26,12 +26,38 @@ body: options: - label: The dependencies are supported. required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM Z Open Automation Utilities + description: Which version of ZOAU are you using? + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + - v1.2.1 + - v1.2.2 + validations: + required: true + - type: dropdown + id: collection-version + attributes: + label: IBM Enterprise Python + description: Which version of IBM Enterprise Python are you using? + multiple: true + options: + - v3.8.x + - v3.9.x + - v3.10.x + - v3.11.x + validations: + required: true - type: dropdown id: collection-version attributes: label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. 
If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: true options: - v1.0.0 @@ -42,20 +68,8 @@ body: - v1.3.3 - v1.3.5 - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 + - v1.4.0 + - v1.5.0 validations: required: true - type: dropdown From 7eca65b2a6105b500d9ec2fd0a26956a6d5c73af Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:58:05 -0700 Subject: [PATCH 040/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 8eaee478e..92c1eca9b 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -41,7 +41,7 @@ body: validations: required: true - type: dropdown - id: collection-version + id: python-version attributes: label: IBM Enterprise Python description: Which version of IBM Enterprise Python are you using? 
From 25b3f306e946bf9d9bcb83fca6e32a78e8ff5c4d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:00:30 -0700 Subject: [PATCH 041/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 92c1eca9b..6fe2f793d 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,5 +1,5 @@ name: Report a bug -description: Request that a bug be reviewed. +description: Request that a bug be reviewed. Complete all required fields. title: "[Bug] <title> " labels: [Bug] assignees: @@ -8,7 +8,6 @@ body: - type: markdown attributes: value: | - Please complete all required fields. --- - type: checkboxes id: existing-issue From c512ec9457f9a8c58dc8dca56a661e7ebde1830e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:03:30 -0700 Subject: [PATCH 042/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 6fe2f793d..d3570d78a 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -99,6 +99,18 @@ body: - zos_tso_command validations: required: false + - type: dropdown + id: z/OS version + attributes: + label: z/OS version + description: What is the version of z/OS on the managed node. 
+ multiple: true + options: + - v2.3 + - v2.4 + - v2.5 + validations: + required: false - type: textarea id: issue-description attributes: @@ -155,12 +167,3 @@ body: render: shell validations: required: true - - - type: input - id: zos-version - attributes: - label: z/OS version - description: What is the version of z/OS on the managed node. - validations: - required: false - From 2089d33ffb84530e8f10feb11c3f804f69870401 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:09:36 -0700 Subject: [PATCH 043/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index d3570d78a..641789470 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -100,11 +100,11 @@ body: validations: required: false - type: dropdown - id: z/OS version + id: zos-version attributes: label: z/OS version - description: What is the version of z/OS on the managed node. - multiple: true + description: What is the version of z/OS on the managed node? + multiple: false options: - v2.3 - v2.4 From d7d32ac8d6a0cf7ea1f1569c646db436e8f445c3 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:12:48 -0700 Subject: [PATCH 044/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 641789470..556e5b0af 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -30,7 +30,7 @@ body: attributes: label: IBM Z Open Automation Utilities description: Which version of ZOAU are you using? 
- multiple: true + multiple: false options: - v1.0.3 - v1.1.1 @@ -44,7 +44,7 @@ body: attributes: label: IBM Enterprise Python description: Which version of IBM Enterprise Python are you using? - multiple: true + multiple: false options: - v3.8.x - v3.9.x @@ -57,7 +57,7 @@ body: attributes: label: IBM z/OS Ansible core Version description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). - multiple: true + multiple: false options: - v1.0.0 - v1.1.0 From 717c82170c96e3cad7930ac54755e51b40e38a11 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 17:07:31 -0700 Subject: [PATCH 045/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 93 +++++++++++++++------------- 1 file changed, 51 insertions(+), 42 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 556e5b0af..694b07359 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -32,11 +32,11 @@ body: description: Which version of ZOAU are you using? multiple: false options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - - v1.2.1 - v1.2.2 + - v1.2.1 + - v1.2.0 + - v1.1.1 + - v1.0.3 validations: required: true - type: dropdown @@ -46,10 +46,10 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: false options: - - v3.8.x - - v3.9.x - - v3.10.x - v3.11.x + - v3.10.x + - v3.9.x + - v3.8.x validations: required: true - type: dropdown @@ -59,18 +59,45 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). 
multiple: false options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0 - v1.5.0 + - v1.4.0 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + - v1.2.1 + - v1.1.0 + - v1.0.0 validations: required: true + - type: dropdown + id: zos-version + attributes: + label: ansible-version + description: What is the version of Ansible on the controller (`ansible --version`)? + multiple: false + options: + - latest + - v2.14.x + - v2.13.x + - v2.12.x + - v2.11.x + - v2.9.x + validations: + required: false + - type: dropdown + id: zos-version + attributes: + label: z/OS version + description: What is the version of z/OS on the managed node? + multiple: false + options: + - v2.5 + - v2.4 + - v2.3 + validations: + required: false - type: dropdown id: modules attributes: @@ -99,18 +126,6 @@ body: - zos_tso_command validations: required: false - - type: dropdown - id: zos-version - attributes: - label: z/OS version - description: What is the version of z/OS on the managed node? - multiple: false - options: - - v2.3 - - v2.4 - - v2.5 - validations: - required: false - type: textarea id: issue-description attributes: @@ -127,18 +142,20 @@ body: - type: textarea id: issue-output attributes: - label: Playbook verbosity output + label: Playbook verbosity output. description: Provide the command line output with debug and verbosity enabled. placeholder: | - Insert the ouput using this form of the playbook command. + Insert the output using this form of the playbook command. - `ANSIBLE_DEBUG=1 ansible-playbook -i inventory your-playbook.yml -vvvv` validations: required: false - type: textarea id: ansible-cfg attributes: - label: Contents of `ansible.cfg` - description: Provide the contents of `ansible.cfg`. + label: Ansible configuration. + description: Show the current **ansible.cfg** settings. 
+ placeholder: | + Insert for this command: `ansible-config view` render: YAML validations: required: false @@ -146,7 +163,7 @@ body: id: ansible-inventory attributes: label: Contents of the inventory - description: Provide the contents of the inventory + description: Provide the contents of the inventory. render: YAML validations: required: false @@ -158,12 +175,4 @@ body: render: YAML validations: required: false - - type: textarea - id: ansible-version - attributes: - label: Ansible version - description: What is the verson of Ansible on the controller. - placeholder: Paste verbatim output from `ansible --version`. - render: shell - validations: - required: true + From 5f63a9aa7a7bc0494b67c081fdfd9a8a1bb4652c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 17:13:17 -0700 Subject: [PATCH 046/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 694b07359..6daddefcb 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -72,7 +72,7 @@ body: validations: required: true - type: dropdown - id: zos-version + id: ansible-version attributes: label: ansible-version description: What is the version of Ansible on the controller (`ansible --version`)? 
From 7beff18204f0e49d9622c29b80698077649035cb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 17:15:03 -0700 Subject: [PATCH 047/413] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 6daddefcb..1ab6eb602 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -85,7 +85,7 @@ body: - v2.11.x - v2.9.x validations: - required: false + required: true - type: dropdown id: zos-version attributes: From 097e1ac44ddf516467178998bc6adc1fd3b54618 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:45:27 -0700 Subject: [PATCH 048/413] Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .../ISSUE_TEMPLATE/collaboration_issue.yml | 121 +++++++++++++++--- 1 file changed, 102 insertions(+), 19 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 3c46b8f81..ca9ab8a49 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -1,5 +1,5 @@ -name: Collaboration task -description: Identify a collaboration between this development team and another party. e.g, A support case, dependency effort,etc +name: Collaboration Issue +description: A collaboration with the development team and another external resource. e.g, Support case, dependency, community, etc title: "[Collaboration] <title> " labels: [Collaboration] assignees: @@ -8,44 +8,109 @@ body: - type: markdown attributes: value: | - Before authoring a task, please review existing issues to avoid duplication. --- - type: checkboxes + id: existing-issue attributes: label: Is there an existing issue for this? 
description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true + options: + - label: There are no existing issues. + required: true - type: checkboxes id: support-issue attributes: label: Support and service? - description: Is support and service involved in this collaboration + description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. required: true - - type: textarea - id: issue-description + - type: checkboxes + id: valid-dependencies attributes: - label: Collaboration description - description: Describe the task, this is the equivalent of a agile story. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. + label: Are the dependencies a supported version? + description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + options: + - label: The dependencies are supported. + required: false + - type: dropdown + id: zoau-version + attributes: + label: IBM Z Open Automation Utilities + description: Which version of ZOAU are you using? + multiple: false + options: + - v1.2.2 + - v1.2.1 + - v1.2.0 + - v1.1.1 + - v1.0.3 validations: - required: true - - type: textarea - id: collaborators + required: false + - type: dropdown + id: python-version attributes: - label: collaborators - description: Who or what product is part of this collaboration. - placeholder: GH IDs, product, etc + label: IBM Enterprise Python + description: Which version of IBM Enterprise Python are you using? 
+ multiple: false + options: + - v3.11.x + - v3.10.x + - v3.9.x + - v3.8.x + validations: + required: false + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false + options: + - v1.5.0 + - v1.4.0 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + - v1.2.1 + - v1.1.0 + - v1.0.0 validations: required: true + - type: dropdown + id: ansible-version + attributes: + label: ansible-version + description: What is the version of Ansible on the controller (`ansible --version`)? + multiple: false + options: + - latest + - v2.14.x + - v2.13.x + - v2.12.x + - v2.11.x + - v2.9.x + validations: + required: false + - type: dropdown + id: zos-version + attributes: + label: z/OS version + description: What is the version of z/OS on the managed node? + multiple: false + options: + - v2.5 + - v2.4 + - v2.3 + validations: + required: false - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported for this task. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - zos_apf @@ -69,3 +134,21 @@ body: - zos_tso_command validations: required: false + - type: textarea + id: issue-description + attributes: + label: Collaboration description + description: Describe the collaboration issue. + placeholder: | + For example + 1. Working with IBM Enterprise Python to resolve issue xyz. + 2. Working with z/OS application team DFSMS to resolve xyz. + 3. Assisting IBM support to resolve an ibm_zos_copy issue. 
+ validations: + required: true + + + + + + From 753bfbd626a639ef55d8aa09d51cfea58011514c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:47:21 -0700 Subject: [PATCH 049/413] Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index ca9ab8a49..420658709 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -22,9 +22,9 @@ body: attributes: label: Support and service? description: Is support and service involved in this collaboration? - options: - - label: Yes, support and service is involved. - required: true + options: + - label: Yes, support and service is involved. + required: true - type: checkboxes id: valid-dependencies attributes: From ebb770a535ffebebff10b3b66f7acae530305e8f Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:49:55 -0700 Subject: [PATCH 050/413] Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 420658709..a1dfe96e8 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -1,14 +1,10 @@ -name: Collaboration Issue +name: Request a Collaboration description: A collaboration with the development team and another external resource. 
e.g, Support case, dependency, community, etc title: "[Collaboration] <title> " labels: [Collaboration] assignees: - IBMAnsibleHelper body: - - type: markdown - attributes: - value: | - --- - type: checkboxes id: existing-issue attributes: From b83571d3b3b0a717ea759d3f40080829fe629f4d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:58:38 -0700 Subject: [PATCH 051/413] Template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 4 --- .../ISSUE_TEMPLATE/collaboration_issue.yml | 2 +- .github/ISSUE_TEMPLATE/doc_issue.yml | 15 ++++++---- .github/ISSUE_TEMPLATE/enabler_issue.yml | 12 +++----- .../enhancement_feature.issue.yml | 30 +++++++++++-------- .github/ISSUE_TEMPLATE/module_issue.yml | 11 ++++--- 6 files changed, 38 insertions(+), 36 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 1ab6eb602..359add494 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -5,10 +5,6 @@ labels: [Bug] assignees: - IBMAnsibleHelper body: - - type: markdown - attributes: - value: | - --- - type: checkboxes id: existing-issue attributes: diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index a1dfe96e8..4f9db151e 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -24,7 +24,7 @@ body: - type: checkboxes id: valid-dependencies attributes: - label: Are the dependencies a supported version? + label: Are the dependencies a supported? description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). options: - label: The dependencies are supported. 
diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 9485a79a7..c7bced03d 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -5,11 +5,14 @@ labels: [Documentation] assignees: - IBMAnsibleHelper body: - - type: markdown + - type: checkboxes + id: existing-issue attributes: - value: | - Before reporting a documentation issue, please review existing isssues to avoid duplication. - --- + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: textarea id: issue-description attributes: @@ -17,7 +20,7 @@ body: description: Describe the documentation issue. placeholder: | Verbosity is encouraged, the more you share the better for us to understand. - 1. Include links to the page you are reffering to if applicable + 1. Include links to the page you are referring to if applicable 2. Include reproduction steps if applicable 3. Include any additional information that will help us 4. Include screen captures of applicable @@ -28,7 +31,7 @@ body: id: ansible-version attributes: label: Ansible version - description: What is the verson of Ansible on the controller if applicable. + description: What is the version of Ansible on the controller if applicable. placeholder: Paste verbatim output from `ansible --version`. render: SHELL validations: diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index acce5523e..18abe0400 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -5,18 +5,14 @@ labels: [Enabler] assignees: - IBMAnsibleHelper body: - - type: markdown - attributes: - value: | - Before authoring a task, please review existing issues to avoid duplication. 
- --- - type: checkboxes + id: existing-issue attributes: label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true + options: + - label: There are no existing issues. + required: true - type: dropdown id: modules attributes: diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index 597bebbf2..02901be8c 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -5,19 +5,14 @@ labels: [Enhancement] assignees: - IBMAnsibleHelper body: - - type: markdown + - type: checkboxes + id: existing-issue attributes: - value: | - Before requesting an enhancement or feature, please review existing isssues to avoid duplication. - --- - - type: textarea - id: issue-description - attributes: - label: Enhancement or featture description - description: Describe the enhancement or feature you are requesting. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. - validations: - required: true + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: dropdown id: modules attributes: @@ -45,4 +40,13 @@ body: - zos_ping - zos_tso_command validations: - required: false + required: true + - type: textarea + id: issue-description + attributes: + label: Enhancement or feature description + description: Describe the enhancement or feature you are requesting. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. 
+ validations: + required: true + diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index f11ca537a..60dee4415 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -5,11 +5,14 @@ labels: [Module] assignees: - IBMAnsibleHelper body: - - type: markdown + - type: checkboxes + id: existing-issue attributes: - value: | - Before requesting a new module, please review existing isssues to avoid duplication. - --- + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: textarea id: issue-description attributes: From 1b938f9fdd2bc103cf2f6efa79e9dc583fb5c582 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 21:04:29 -0700 Subject: [PATCH 052/413] Template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 2 +- .github/ISSUE_TEMPLATE/doc_issue.yml | 2 +- .github/ISSUE_TEMPLATE/enabler_issue.yml | 4 +++- .github/ISSUE_TEMPLATE/enhancement_feature.issue.yml | 2 +- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 4f9db151e..4ea4e4108 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -1,5 +1,5 @@ name: Request a Collaboration -description: A collaboration with the development team and another external resource. e.g, Support case, dependency, community, etc +description: Request collaboration with a member of this team. Complete all required fields. 
title: "[Collaboration] <title> " labels: [Collaboration] assignees: diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index c7bced03d..07ddbc40e 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -1,5 +1,5 @@ name: Report a documentation issue -description: Request that documentation be reviewed. +description: Request that documentation be reviewed. Complete all required fields. title: "[Documentation] <title> " labels: [Documentation] assignees: diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index 18abe0400..37131e500 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -1,5 +1,7 @@ name: Enabler task -description: Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. +description: | + Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. + Complete all required fields. title: "[Enabler] <title> " labels: [Enabler] assignees: diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index 02901be8c..d39840872 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -1,5 +1,5 @@ name: Request an enhancement or new feature -description: Request a new feature or that content be enhanced. +description: Request a new feature or an enhancement. Complete all required fields. 
title: "[Enhancement] <title> " labels: [Enhancement] assignees: diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index 60dee4415..beea537e9 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -1,5 +1,5 @@ name: Request a new module -description: Request a new module be added to the collection. +description: Request a new module be added to the collection. Complete all required fields. title: "[Module] <title> " labels: [Module] assignees: From 8b1796aeaf69090dbef9e55288cd9113b4a45ab5 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:17:32 -0700 Subject: [PATCH 053/413] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 4ea4e4108..f71fcf355 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -20,14 +20,16 @@ body: description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. - required: true + - label: No, support and service is involved. + validations: + required: false - type: checkboxes id: valid-dependencies attributes: label: Are the dependencies a supported? description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). options: - - label: The dependencies are supported. + - label: Yes, the dependencies are supported. 
required: false - type: dropdown id: zoau-version From ab87d126702db1b10f8a1b59b5246468a27872f0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:18:51 -0700 Subject: [PATCH 054/413] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index f71fcf355..ebb81d8e3 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -22,7 +22,7 @@ body: - label: Yes, support and service is involved. - label: No, support and service is involved. validations: - required: false + required: true - type: checkboxes id: valid-dependencies attributes: From 2a92a72b614bdaa16dff3b38a11664d9ddf420dc Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:19:55 -0700 Subject: [PATCH 055/413] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index ebb81d8e3..2137baddf 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -20,9 +20,9 @@ body: description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. + required: true - label: No, support and service is involved. 
- validations: - required: true + required: true - type: checkboxes id: valid-dependencies attributes: From 65df75647edcf990fd6001ce1c7dbb390940fa07 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:21:05 -0700 Subject: [PATCH 056/413] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 2137baddf..eab9c5b33 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -20,9 +20,9 @@ body: description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. - required: true + required: false - label: No, support and service is involved. - required: true + required: false - type: checkboxes id: valid-dependencies attributes: From 85180bd94384fd201fdf3be99625130a74de7f0b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:27:24 -0700 Subject: [PATCH 057/413] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index eab9c5b33..fcd264828 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -31,6 +31,8 @@ body: options: - label: Yes, the dependencies are supported. required: false + - label: Not applicable to this collaboration. 
+ required: false - type: dropdown id: zoau-version attributes: From 8f9faec3dd41d0486b5d6c4d87dc3e2c8077ed3a Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:29:57 -0700 Subject: [PATCH 058/413] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index fcd264828..bf6db4778 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -52,7 +52,7 @@ body: attributes: label: IBM Enterprise Python description: Which version of IBM Enterprise Python are you using? - multiple: false + multiple: true options: - v3.11.x - v3.10.x @@ -78,7 +78,7 @@ body: - v1.1.0 - v1.0.0 validations: - required: true + required: false - type: dropdown id: ansible-version attributes: From a6a30a5500b519cc86050a979ecda445a4d28699 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 22 Mar 2023 17:05:55 -0400 Subject: [PATCH 059/413] changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. 
--- ...os-job-query-handle-multiple-wildcards.yml | 4 ++ plugins/module_utils/job.py | 5 ++- plugins/modules/zos_job_query.py | 38 +++++++++++++------ .../modules/test_zos_job_query_func.py | 10 +++++ 4 files changed, 45 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml new file mode 100644 index 000000000..43c6f0525 --- /dev/null +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -0,0 +1,4 @@ +enhancements: + - zos_job_query - This bugfix adjusts the job_name parameter to handle multiple embedded wildcards. + This also required change to job.py/_get_job_status to follow the wildcard feature. + (https://github.com/ansible-collections/ibm_zos_core/pull/---) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d7c156673..e97e30784 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -13,6 +13,7 @@ __metaclass__ = type +import fnmatch import re from time import sleep from timeit import default_timer as timer @@ -207,7 +208,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= if owner != entry.owner: continue if job_name != "*": - if job_name != entry.name: + # if job_name != entry.name: + # continue + if not fnmatch.fnmatch( entry.name, job_name ): continue job = {} diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 8f7d7fc93..0c1878816 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -208,8 +208,8 @@ def run_module(): return result try: - validate_arguments(module.params) - jobs_raw = query_jobs(module.params) + name, id, owner = validate_arguments(module.params) + jobs_raw = query_jobs(name, id, owner) jobs = parsing_jobs(jobs_raw) except Exception as e: 
module.fail_json(msg=e, **result) @@ -217,8 +217,10 @@ def run_module(): module.exit_json(**result) +# validate_arguments rturns a tuple, so we don't have to rebuild the job_name string def validate_arguments(params): job_name_in = params.get("job_name") + job_name_final = job_name_in job_id = params.get("job_id") owner = params.get("owner") if job_name_in or job_id: @@ -229,10 +231,26 @@ def validate_arguments(params): ) m = job_name_pattern.search(job_name_in) n = job_name_pattern_with_star.search(job_name_in) - if m or n: - pass - else: + # logic twist: o must be non-null value from m or n + o = m + if n: + o = n + + # if neither m nor n were non-null, check if the string needed to be truncated to the first * + if not o: + ix = job_name_in.find("*") + if ix >= 0: + job_name_short = job_name_in[0, ix+1] + o = job_name_pattern.search(job_name_short) + if not o: + o = job_name_pattern_with_star.search(job_name_short) + if o: + job_name_final = job_name_short + + # so now, fail if neither m, n, or o=m/n(short) found a match + if not o: raise RuntimeError("Failed to validate the job name: " + job_name_in) + if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") if not job_id_pattern.search(job_id): @@ -242,19 +260,17 @@ def validate_arguments(params): if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") + return job_name_final, job_id, owner; -def query_jobs(params): - job_name_in = params.get("job_name") - job_id = params.get("job_id") - owner = params.get("owner") +def query_jobs(job_name, job_id, owner): jobs = [] if job_id: jobs = job_status(job_id=job_id) elif owner: - jobs = job_status(owner=owner, job_name=job_name_in) + jobs = job_status(owner=owner, job_name=job_name) else: - jobs = job_status(job_name=job_name_in) + jobs = job_status(job_name=job_name) if not jobs: raise RuntimeError("List FAILED! 
no such job was found.") return jobs diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index b94be19a7..32914731c 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -33,3 +33,13 @@ def test_zos_job_query_func(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs") is not None + + +# test to show multi wildcard won't crash the search +def test_zos_job_query_multi_wildcards_func(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_job_query(job_name="JOB*1*", owner="*") + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs") is not None \ No newline at end of file From e4b28dea7f3bc1538c29142381e57fe7d915c73d Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 23 Mar 2023 15:37:21 -0400 Subject: [PATCH 060/413] expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete --- ...os-job-query-handle-multiple-wildcards.yml | 4 +-- plugins/module_utils/job.py | 7 ++-- plugins/modules/zos_job_query.py | 33 ++++++++++++++++--- 3 files changed, 35 insertions(+), 9 deletions(-) diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml index 43c6f0525..ae2871b9f 100644 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -1,4 +1,4 @@ enhancements: - - zos_job_query - This bugfix adjusts the job_name parameter to handle multiple embedded wildcards. - This also required change to job.py/_get_job_status to follow the wildcard feature. 
+ - zos_job_query - This enhancement adjusts the job_name and job_id parameters to handle embedded wildcards. + This also required change to job.py/_get_job_status to follow the wildcard feature, using fnmatch logic. (https://github.com/ansible-collections/ibm_zos_core/pull/---) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index e97e30784..8253b7ee5 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -132,9 +132,9 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): """ arg_defs = dict( - job_id=dict(arg_type="qualifier_pattern"), + job_id=dict(arg_type="str"), owner=dict(arg_type="qualifier_pattern"), - job_name=dict(arg_type="qualifier_pattern"), + job_name=dict(arg_type="str"), dd_name=dict(arg_type="str"), ) @@ -212,6 +212,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # continue if not fnmatch.fnmatch( entry.name, job_name ): continue + if job_id_temp != None: + if not fnmatch.fnmatch( entry.id, job_id ): + continue job = {} job["job_id"] = entry.id diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 0c1878816..a6d905933 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -210,7 +210,11 @@ def run_module(): try: name, id, owner = validate_arguments(module.params) jobs_raw = query_jobs(name, id, owner) - jobs = parsing_jobs(jobs_raw) + if jobs_raw: + jobs = parsing_jobs(jobs_raw) + else: + jobs = None + except Exception as e: module.fail_json(msg=e, **result) result["jobs"] = jobs @@ -221,7 +225,10 @@ def run_module(): def validate_arguments(params): job_name_in = params.get("job_name") job_name_final = job_name_in + job_id = params.get("job_id") + job_id_final = job_id + owner = params.get("owner") if job_name_in or job_id: if job_name_in and job_name_in != "*": @@ -240,7 +247,7 @@ def validate_arguments(params): if not o: ix = job_name_in.find("*") if ix >= 0: - job_name_short = 
job_name_in[0, ix+1] + job_name_short = job_name_in[0:ix+1] o = job_name_pattern.search(job_name_short) if not o: o = job_name_pattern_with_star.search(job_name_short) @@ -249,18 +256,34 @@ def validate_arguments(params): # so now, fail if neither m, n, or o=m/n(short) found a match if not o: - raise RuntimeError("Failed to validate the job name: " + job_name_in) + raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") - if not job_id_pattern.search(job_id): + m = job_id_pattern.search(job_id) + o = None + + if not m: + ix = job_id.find("*") + if ix > 0: + # this differs from job_name, in that we'll drop the star for the search + job_id_short = job_id[0:ix] + + if job_id_short[0:3] in ['JOB','TSU','STC'] or job_id_short[0:1] in ['J','T','S']: + o = job_id_short + + if o: + job_id_final = job_id_short + '*' + + if not m and not o: raise RuntimeError("Failed to validate the job id: " + job_id) else: raise RuntimeError("Argument Error:Either job name(s) or job id is required") if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") - return job_name_final, job_id, owner; + # return job_name_final, job_id_final, owner; + return job_name_in, job_id, owner; def query_jobs(job_name, job_id, owner): From 556dd2ff4e213f5ed9dab3307713d6eb5840166f Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 23 Mar 2023 15:53:04 -0400 Subject: [PATCH 061/413] cleaned up pep8 issues --- plugins/module_utils/job.py | 6 +++--- plugins/modules/zos_job_query.py | 27 ++++++++++++++------------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 8253b7ee5..00ec0407f 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -210,10 +210,10 @@ def _get_job_status(job_id="*", owner="*", 
job_name="*", dd_name=None, duration= if job_name != "*": # if job_name != entry.name: # continue - if not fnmatch.fnmatch( entry.name, job_name ): + if not fnmatch.fnmatch(entry.name, job_name): continue - if job_id_temp != None: - if not fnmatch.fnmatch( entry.id, job_id ): + if job_id_temp is not None: + if not fnmatch.fnmatch(entry.id, job_id): continue job = {} diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index a6d905933..22968bc62 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -247,12 +247,12 @@ def validate_arguments(params): if not o: ix = job_name_in.find("*") if ix >= 0: - job_name_short = job_name_in[0:ix+1] - o = job_name_pattern.search(job_name_short) - if not o: - o = job_name_pattern_with_star.search(job_name_short) - if o: - job_name_final = job_name_short + job_name_short = job_name_in[0:ix + 1] + o = job_name_pattern.search(job_name_short) + if not o: + o = job_name_pattern_with_star.search(job_name_short) + if o: + job_name_final = job_name_short # so now, fail if neither m, n, or o=m/n(short) found a match if not o: @@ -266,14 +266,14 @@ def validate_arguments(params): if not m: ix = job_id.find("*") if ix > 0: - # this differs from job_name, in that we'll drop the star for the search - job_id_short = job_id[0:ix] + # this differs from job_name, in that we'll drop the star for the search + job_id_short = job_id[0:ix] - if job_id_short[0:3] in ['JOB','TSU','STC'] or job_id_short[0:1] in ['J','T','S']: - o = job_id_short + if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: + o = job_id_short - if o: - job_id_final = job_id_short + '*' + if o: + job_id_final = job_id_short + '*' if not m and not o: raise RuntimeError("Failed to validate the job id: " + job_id) @@ -283,7 +283,8 @@ def validate_arguments(params): raise RuntimeError("Argument Error:job id can not be co-exist with owner") # return job_name_final, job_id_final, owner; - 
return job_name_in, job_id, owner; + return job_name_in, job_id, owner + def query_jobs(job_name, job_id, owner): From 7b93feca57a0f4f59e3d1ef55213297b439c3dd6 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 27 Mar 2023 15:19:35 -0700 Subject: [PATCH 062/413] Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. 
--------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/683-zos_job_submit-bugs.yml | 35 ++++ docs/source/modules/zos_job_submit.rst | 26 +-- docs/source/modules/zos_operator.rst | 16 +- plugins/action/zos_job_submit.py | 16 ++ plugins/module_utils/job.py | 16 +- plugins/modules/zos_job_submit.py | 126 ++++++++---- .../modules/test_zos_job_submit_func.py | 192 +++++++++++++++++- 7 files changed, 350 insertions(+), 77 deletions(-) create mode 100644 changelogs/fragments/683-zos_job_submit-bugs.yml diff --git a/changelogs/fragments/683-zos_job_submit-bugs.yml b/changelogs/fragments/683-zos_job_submit-bugs.yml new file mode 100644 index 000000000..b77fbdbc9 --- /dev/null +++ b/changelogs/fragments/683-zos_job_submit-bugs.yml @@ -0,0 +1,35 @@ +bugfixes: +- zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that a + stack trace would result in the response, issue 623. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is purged by the system that a + stack trace would result in the response, issue 681. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue where the response did not include the + job log when a non-zero return code would occur, issue 655. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when resources (data sets) identified in JCL + did not exist such that a stack trace would result in the response, issue 624. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would + result in a `type` error that a stack trace would result in the response, + issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job encounters a security exception no + job log would would result in the response, issue 684. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` + to return a response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that no job log would result in the response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +trivial: +- zos_job_submit - Update documentation to for deprecated `wait` option and + expand on the `wait_time_s` description, issue 670. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Update documentation to describing the significance of '?' + for the 'ret_code' properties 'msg_text', 'msg_code' and 'msg', issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_operator - Update restructured text to include the updated examples. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) \ No newline at end of file diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index bcf0c5383..bb438f8a5 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -16,9 +16,9 @@ zos_job_submit -- Submit JCL Synopsis -------- -- Submit JCL from DATA_SET , USS, or LOCAL location. -- Submit a job and optionally monitor for its execution. -- Optionally wait a designated time until the job finishes. +- Submit JCL from a data set, USS, or from the controller. +- Submit a job and optionally monitor for completion. +- Optionally, wait a designated time until the job finishes. - For an uncataloged dataset, specify the volume serial number. @@ -32,7 +32,7 @@ Parameters src The source file or data set containing the JCL to submit. 
- It could be physical sequential data set or a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") + It could be a physical sequential data set, a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") Or a USS file. (e.g "/u/tester/demo/sample.jcl") @@ -58,20 +58,20 @@ location wait - Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. + Setting this option will yield no change, it is deprecated. There is no no need to set *wait*; setting *wait_times_s* is the correct way to configure the amount of tme to wait for a job to execute. - Setting this option will yield no change, it is deprecated. + Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. - See option ``wait_time_s``. + See option *wait_time_s*. | **required**: False | **type**: bool wait_time_s - When *wait* is true, the module will wait for the number of seconds for Job completion. + Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - User can set the wait time manually with this option. + *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. | **required**: False | **type**: int @@ -100,7 +100,7 @@ volume When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for USS and LOCAL. + Ignored for *location=USS* and *location=LOCAL*. | **required**: False | **type**: str @@ -548,18 +548,18 @@ jobs } msg - Return code resulting from the job submission. + Return code resulting from the job submission. 
Jobs that take longer to assign a value can have a value of '?'. | **type**: str | **sample**: CC 0000 msg_code - Return code extracted from the `msg` so that it can be evaluated as a string. + Return code extracted from the `msg` so that it can be evaluated as a string. Jobs that take longer to assign a value can have a value of '?'. | **type**: str msg_txt - Returns additional information related to the job. + Returns additional information related to the job. Jobs that take longer to assign a value can have a value of '?'. | **type**: str | **sample**: The job completion code (CC) was not available in the job output, please review the job log." diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 7742e60cd..868c78a10 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -59,10 +59,12 @@ wait_time_s wait - Configuring wait used by the `zos_operator <./zos_operator.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. + Configuring wait used by the `zos_operator <./zos_operator.html>`_ module has been deprecated and will be removed in a future ibm.ibm_zos_core collection. Setting this option will yield no change, it is deprecated. + Review option *wait_time_s* to instruct operator commands to wait. + | **required**: False | **type**: bool | **default**: True @@ -76,13 +78,13 @@ Examples .. 
code-block:: yaml+jinja - - name: Execute an operator command to show active jobs + - name: Execute an operator command to show device status and allocation zos_operator: - cmd: 'd u,all' + cmd: 'd u' - - name: Execute an operator command to show active jobs with verbose information + - name: Execute an operator command to show device status and allocation with verbose information zos_operator: - cmd: 'd u,all' + cmd: 'd u' verbose: true - name: Execute an operator command to purge all job logs (requires escaping) @@ -91,12 +93,12 @@ Examples - name: Execute operator command to show jobs, waiting up to 5 seconds for response zos_operator: - cmd: 'd u,all' + cmd: 'd a,all' wait_time_s: 5 - name: Execute operator command to show jobs, always waiting 7 seconds for response zos_operator: - cmd: 'd u,all' + cmd: 'd a,all' wait_time_s: 7 diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index dd4d8e06f..7247f6b7b 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -136,4 +136,20 @@ def run(self, tmp=None, task_vars=None): ) ) + def delete_dict_entries(entries, dictionary): + """ Deletes entries from a dictionary when provided key and dictionary. + + Arguments: + entries (tuple) - entries to delete from dictionary + dictionary (dic) - dictionary to remove entries + """ + for key in entries: + if key in dictionary: + del dictionary[key] + + # Currently the direction is undecided if we should continue to use the + # community action plugins or transition to SFTP, so this code + # can remain should we want to clean up unrelated response values. 
+ # entries = ('checksum', 'dest', 'gid', 'group', 'md5sum', 'mode', 'owner', 'size', 'src', 'state', 'uid') + # delete_dict_entries(entries, result) return result diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d7c156673..478a605e5 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -35,10 +35,13 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, """Get the output from a z/OS job based on various search criteria. Keyword Arguments: - job_id {str} -- The job ID to search for (default: {None}) - owner {str} -- The owner of the job (default: {None}) - job_name {str} -- The job name search for (default: {None}) - dd_name {str} -- The data definition to retrieve (default: {None}) + job_id (str) -- The job ID to search for (default: {None}) + owner (str) -- The owner of the job (default: {None}) + job_name (str) -- The job name search for (default: {None}) + dd_name (str) -- The data definition to retrieve (default: {None}) + duration (int) -- The time the submitted job ran for + timeout (int) - how long to wait in seconds for a job to complete + start_time (int) - time the JCL started its submission Returns: list[dict] -- The output information for a list of jobs matching specified criteria. @@ -220,7 +223,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"] = {} job["ret_code"]["msg"] = entry.status + " " + entry.rc job["ret_code"]["msg_code"] = entry.rc - job["ret_code"]["code"] = "" + # Why was this set to an empty string? 
+ job["ret_code"]["code"] = None if len(entry.rc) > 0: if entry.rc.isdigit(): job["ret_code"]["code"] = int(entry.rc) @@ -312,7 +316,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"]["msg"] = tmptext.strip() job["ret_code"]["msg_code"] = None job["ret_code"]["code"] = None - if len(list_of_dds) > 1: + if len(list_of_dds) > 0: # The duration should really only be returned for job submit but the code # is used job_output as well, for now we can ignore this point unless # we want to offer a wait_time_s for job output which might be reasonable. diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 6b3df1506..a58e138a1 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -25,9 +25,9 @@ - "Demetrios Dimatos (@ddimatos)" short_description: Submit JCL description: - - Submit JCL from DATA_SET , USS, or LOCAL location. - - Submit a job and optionally monitor for its execution. - - Optionally wait a designated time until the job finishes. + - Submit JCL from a data set, USS, or from the controller. + - Submit a job and optionally monitor for completion. + - Optionally, wait a designated time until the job finishes. - For an uncataloged dataset, specify the volume serial number. version_added: "1.0.0" options: @@ -36,7 +36,7 @@ type: str description: - The source file or data set containing the JCL to submit. - - It could be physical sequential data set or a partitioned data set + - It could be a physical sequential data set, a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") - Or a USS file. (e.g "/u/tester/demo/sample.jcl") - Or a LOCAL file in ansible control node. @@ -59,17 +59,23 @@ default: false type: bool description: + - Setting this option will yield no change, it is deprecated. 
There is no + no need to set I(wait); setting I(wait_times_s) is the correct way to + configure the amount of tme to wait for a job to execute. - Configuring wait used by the L(zos_job_submit,./zos_job_submit.html) module has been deprecated and will be removed in ibm.ibm_zos_core collection. - - Setting this option will yield no change, it is deprecated. - - See option ``wait_time_s``. + - See option I(wait_time_s). wait_time_s: required: false default: 10 type: int description: - - When I(wait) is true, the module will wait for the number of seconds for Job completion. - - User can set the wait time manually with this option. + - Option I(wait_time_s) is the total time that module + L(zos_job_submit,./zos_job_submit.html) will wait for a submitted job + to complete. The time begins when the module is executed on the managed + node. + - I(wait_time_s) is measured in seconds and must be a value greater than 0 + and less than 86400. max_rc: required: false type: int @@ -91,7 +97,7 @@ - When configured, the L(zos_job_submit,./zos_job_submit.html) will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - - Ignored for USS and LOCAL. + - Ignored for I(location=USS) and I(location=LOCAL). encoding: description: - Specifies which encoding the local JCL file should be converted from @@ -218,18 +224,21 @@ contains: msg: description: - Return code resulting from the job submission. + Return code resulting from the job submission. Jobs that take + longer to assign a value can have a value of '?'. type: str sample: CC 0000 msg_code: description: Return code extracted from the `msg` so that it can be evaluated - as a string. + as a string. Jobs that take longer to assign a value can have a + value of '?'. type: str sample: 0000 msg_txt: description: - Returns additional information related to the job. + Returns additional information related to the job. Jobs that take + longer to assign a value can have a value of '?'. 
type: str sample: The job completion code (CC) was not available in the job output, please review the job log." @@ -580,29 +589,31 @@ JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) -JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) +JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) MAX_WAIT_TIME_S = 86400 -def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=timer()): +def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, start_time=timer()): """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. Arguments: module - module instnace to access the module api - src (str) - JCL, can be relative or absolute paths either on controller or USS - - Data set, can be PS, PDS, PDSE Member - timeout (int) - how long to wait in seconds for a job to complete - hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all - JCL local to a controller is transfered to USS thus would be - True - volume (str) - volume the data set JCL is located on that will be cataloged before - being submitted - start_time - time the JCL started its submission + src (str) - JCL, can be relative or absolute paths either on controller or USS + - Data set, can be PS, PDS, PDSE Member + src_name (str) - the src name that was provided in the module because through + the runtime src could be replace with a temporary file name + timeout (int) - how long to wait in seconds for a job to complete + hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all + JCL local to a controller is transfered to USS thus would be + True + volume (str) - volume the data set JCL is located on that will be cataloged before + being submitted + start_time - time the JCL started its submission Returns: - job_submitted_id - the JCL job ID returned from submitting a job, else if no - job submits, None will be 
returned - duration - how long the job ran for in this method + job_submitted_id - the JCL job ID returned from submitting a job, else if no + job submits, None will be returned + duration - how long the job ran for in this method """ kwargs = { @@ -652,6 +663,7 @@ def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=tim # drop through and get analyzed in the main as it will scan the job ouput # Any match to JOB_ERROR_MESSAGES ends our processing and wait times while (job_listing_status not in JOB_ERROR_MESSAGES and + job_listing_status == 'AC' and ((job_listing_rc is None or len(job_listing_rc) == 0 or job_listing_rc == '?') and duration < timeout)): current_time = timer() @@ -660,16 +672,20 @@ def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=tim job_listing_rc = jobs.listing(job_submitted.id)[0].rc job_listing_status = jobs.listing(job_submitted.id)[0].status - # ZOAU throws a ZOAUException when the job sumbission fails, not when the - # JCL is non-zero, for non-zero JCL RCs that is caught in the job_output - # processing + # ZOAU throws a ZOAUException when the job submission fails thus there is no + # JCL RC to share with the user, if there is an RC, that will be processed + # in the job_output parser. except ZOAUException as err: result["changed"] = False result["failed"] = True result["stderr"] = str(err) - result["msg"] = ("Unable to submit job {0}, a job sumission has returned " - "a non-zero return code, please review the standard error " - "and contact a system administrator.".format(src)) + result["duration"] = duration + result["job_id"] = job_submitted.id if job_submitted else None + result["msg"] = ("Unable to submit job {0}, the job submission has failed. " "Without the job id, the error can not be determined. 
" + "Consider using module `zos_job_query` to poll for the " + "job by name or review the system log for purged jobs " + "resulting from an abend.".format(src_name)) module.fail_json(**result) # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id @@ -684,7 +700,29 @@ def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=tim "within the allocated time of {1} seconds. Consider using " " module zos_job_query to poll for a long running " "jobs or increasing the value for " - "'wait_times_s`.".format(src, str(timeout))) + "`wait_times_s`.".format(src_name, str(timeout))) + module.fail_json(**result) + + # Between getting a job_submitted and the jobs.listing(job_submitted.id)[0].rc + # is enough time for the system to purge an invalid job, so catch it and let + # it fall through to the catchall. + except IndexError: + job_submitted = None + + # There appears to be a small fraction of time when ZOAU has a handle on the + # job and and suddenly its purged, this check is to ensure the job is there + # long after the purge else we throw an error here if its been purged. + if job_submitted is None: + result["changed"] = False + result["failed"] = True + result["duration"] = duration + result["job_id"] = job_submitted.id if job_submitted else None + result["msg"] = ("The job {0} has been submitted and no job id was returned " + "within the allocated time of {1} seconds. 
Without the " + "job id, the error can not be determined, consider using " + "module `zos_job_query` to poll for the job by name or " + "review the system log for purged jobs resulting from an " + "abend.".format(src_name, str(timeout))) module.fail_json(**result) return job_submitted.id if job_submitted else None, duration @@ -786,27 +824,28 @@ def run_module(): # temporary file names for copied files when user sets location to LOCAL temp_file = parsed_args.get("temp_file") temp_file_encoded = None - if temp_file: - temp_file_encoded = NamedTemporaryFile(delete=True) # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) if wait_time_s <= 0 or wait_time_s > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option wait_time_s is not valid, it must " - "be greater than 0 and less than " + MAX_WAIT_TIME_S) + result["msg"] = ("The value for option `wait_time_s` is not valid, it must " + "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) + if temp_file: + temp_file_encoded = NamedTemporaryFile(delete=True) + job_submitted_id = None duration = 0 start_time = timer() if location == "DATA_SET": job_submitted_id, duration = submit_src_jcl( - module, src, wait_time_s, False, volume, start_time=start_time) + module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) elif location == "USS": - job_submitted_id, duration = submit_src_jcl(module, src, wait_time_s, True) + job_submitted_id, duration = submit_src_jcl(module, src, src_name=src, timeout=wait_time_s, hfs=True) else: # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( @@ -823,7 +862,7 @@ def run_module(): if conv_rc == 0: job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, wait_time_s, True) + module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, 
hfs=True) else: result["failed"] = True result["stdout"] = stdout @@ -847,6 +886,8 @@ def run_module(): if duration >= wait_time_s: result["failed"] = True result["changed"] = False + if job_output_txt is not None: + result["jobs"] = job_output_txt result["msg"] = ( "The JCL submitted with job id {0} but appears to be a long " "running job that exceeded its maximum wait time of {1} " @@ -860,6 +901,7 @@ def run_module(): is_changed = True if job_output_txt: + result["jobs"] = job_output_txt job_ret_code = job_output_txt[0].get("ret_code") if job_ret_code: @@ -893,8 +935,6 @@ def run_module(): raise Exception("The job return code {0} was non-zero in the " "job output, this job has failed.".format(str(job_code))) - result["jobs"] = job_output_txt - if not return_output: for job in result.get("jobs", []): job["ddnames"] = [] @@ -914,7 +954,7 @@ def run_module(): result["changed"] = False result["msg"] = ("The JCL submitted with job id {0} but " "there was an error, please review " - "the error for further details: {1}.".format + "the error for further details: {1}".format (str(job_submitted_id), str(err))) module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 3106aa292..888281712 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -19,8 +19,24 @@ import tempfile import pytest import re +from pprint import pprint -JCL_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, + + +# ############################################################################## +# Configure the job card as needed, most common keyword parameters: +# CLASS: Used to achieve a balance between different types of jobs and avoid +# contention between jobs that use the same resources. 
+# MSGLEVEL: controls how the allocation messages and termination messages are +# printed in the job's output listing (SYSOUT). +# MSGCLASS: assign an output class for your output listing (SYSOUT) +# ############################################################################## + +JCL_FILE_CONTENTS = """//* +//****************************************************************************** +//* Happy path job that prints hello world, returns RC 0 as is. +//****************************************************************************** +//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY @@ -31,7 +47,13 @@ //SYSUT2 DD SYSOUT=* // """ -JCL_FILE_CONTENTS_R = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, + +JCL_FILE_CONTENTS_BACKSLASH_R = """//* +//****************************************************************************** +//* Happy path job containing backslash r's, returns RC 0 after +//* zos_job_submit strips backslash r's, prints Hello world. +//****************************************************************************** +//HELLOR JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY @@ -42,7 +64,18 @@ //SYSUT2 DD SYSOUT=* // """ -JCL_FILE_CONTENTS_BAD = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, + +JCL_FILE_CONTENTS_BAD = """//* +//****************************************************************************** +//* Negative path job containing !!'s. +//* Returns: +//* ret_code->(code=null, msg=JCL ERROR <int>, msg_text=JCLERR) +//* msg --> The JCL submitted with job id JOB00604 but there was an error, +//* please review the error for further details: The job completion +//* code (CC) was not in the job log. 
Please review the error +//* JCL ERROR 555 and the job log.", +//****************************************************************************** +//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY @@ -54,7 +87,7 @@ // """ -JCL_FILE_CONTENTS_30_SEC = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +JCL_FILE_CONTENTS_30_SEC = """//SLEEP30 JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //USSCMD EXEC PGM=BPXBATCH //STDERR DD SYSOUT=* //STDOUT DD SYSOUT=* @@ -65,7 +98,7 @@ // """ -JCL_FILE_CONTENTS_05_SEC = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +JCL_FILE_CONTENTS_05_SEC = """//SLEEP05 JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //USSCMD EXEC PGM=BPXBATCH //STDERR DD SYSOUT=* //STDOUT DD SYSOUT=* @@ -75,6 +108,7 @@ /* // """ + # Should return a max RC of 8 JCL_FILE_CONTENTS_RC_8 = """//RCBADJCL JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //S1 EXEC PGM=IDCAMS @@ -84,6 +118,105 @@ /* """ +JCL_FILE_CONTENTS_NO_DSN = """//* +//****************************************************************************** +//* Job containing a non existent DSN that will force an error. +//* Returns: +//* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) +//* msg --> The JCL submitted with job id JOB00532 but there was an error, +//* please review the error for further details: The job completion +//* code (CC) was not in the job log. Please review the error +//* JCLERR ? 
and the job log.", +//****************************************************************************** +//JOBLIBPM JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//JOBLIB DD DSN=DATASET.NOT.EXIST,DISP=SHR +//STEP1 EXEC PGM=HELLOPGM +//SYSPRINT DD SYSOUT=* +//SYSOUT DD SYSOUT=* +// +""" + +# Do not use this test case, although its fine, the problem is it does not trigger +# the correct behavior because ZOAU has a bug such that it will return the last +# job when it can not find what we requested, so this causes the wrong job +# go be found and analyzed. See JCL_FILE_CONTENTS_JCL_ERROR_INT that does actually +# force the code properly find the correct job. +# Fix coming in zoau 1.2.3 +# JCL_FILE_CONTENTS_NO_JOB_CARD = """//STEP0001 EXEC PGM=IEBGENER +# //SYSIN DD DUMMY +# //SYSPRINT DD SYSOUT=* +# //SYSUT1 DD * +# HELLO, WORLD +# /* +# //SYSUT2 DD SYSOUT=* +# // +# """ + + +JCL_FILE_CONTENTS_JCL_ERROR_INT = """//* +//****************************************************************************** +//* Another job containing no job card resulting in a JCLERROR with an value. It +//* won't always be 952, it will increment. +//* Returns: +//* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) +//* msg --> The JCL submitted with job id JOB00728 but there was an error, +//* please review the error for further details: The job completion +//* code (CC) was not in the job log. Please review the error +//* JCL ERROR 952 and the job log. +//****************************************************************************** +//CLGP JOB +//CLG EXEC IGYWCLG +//COBOL.SYSIN DD DSN=IBMUSER.ANSIBLE.COBOL(HELLO),DISP=SHR +""" + +JCL_FILE_CONTENTS_INVALID_USER = """//* +//****************************************************************************** +//* Job containing a USER=FOOBAR that will cause JES to return a SEC ERROR which +//* is a security error. +//* Returns: +//* ret_code->(code=null, msg=SEC ?, msg_text=SEC, msg_code=?) 
+//* msg --> The JCL submitted with job id JOB00464 but there was an error, +//* please review the error for further details: The job return code +//* was not available in the job log, please review the job log +//* and error SEC ?.", +//****************************************************************************** +//INVUSER JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,USER=FOOBAR +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD +/* +//SYSUT2 DD SYSOUT=* +// +""" + + +JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* +//****************************************************************************** +//* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and +//* not actually run the JCL. +//* Returns: +//* ret_code->(code=null, msg=? ?, msg_text=?, msg_code=?) +//* msg --> The JCL submitted with job id JOB00620 but there was an error, +//* please review the error for further details: The job return code +//* was not available in the job log, please review the job log +//* and error ? 
?.", +//****************************************************************************** +//TYPESCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD +/* +//SYSUT2 DD SYSOUT=* +// +""" + + TEMP_PATH = "/tmp/jcl" DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -177,7 +310,7 @@ def test_job_submit_LOCAL(ansible_zos_module): def test_job_submit_LOCAL_extraR(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_R) + f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) @@ -262,6 +395,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): try: hosts = ansible_zos_module @@ -375,6 +509,48 @@ def test_job_submit_max_rc(ansible_zos_module, args): assert result.get("msg") is None assert result.get('changed') is False assert result.get("jobs")[0].get("ret_code").get("code") < 12 - finally: - hosts.all.file(path=tmp_file.name, state="absent") \ No newline at end of file + hosts.all.file(path=tmp_file.name, state="absent") + + +def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_NO_DSN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." 
+ assert result.get("changed") is False + assert re.search(r'completion code', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + + +# Should have a JCL ERROR <int> +def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_INVALID_USER) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error SEC', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + +def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error ? ?', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" 
From cbfc4cbd33206498c16252553e7dc4d0bfc75c62 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 30 Mar 2023 14:53:37 -0600 Subject: [PATCH 063/413] Added uss_tag_encoding function --- plugins/module_utils/encode.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 7ecabbb5a..9e2ab1d89 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -453,6 +453,23 @@ def mvs_convert_encoding( return convert_rc + def uss_tag_encoding(self, file_path, tag): + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) + def uss_file_tag(self, file_path): """Returns the current tag set for a file. Arguments: From 22517bc41a6dbe4cc4f05e93dffc70d91ee629dd Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 30 Mar 2023 15:11:58 -0600 Subject: [PATCH 064/413] Fixing linter issues --- plugins/module_utils/encode.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 9e2ab1d89..cfcfd2bf0 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -454,21 +454,21 @@ def mvs_convert_encoding( return convert_rc def uss_tag_encoding(self, file_path, tag): - """Tag the file/directory specified with the given code set. 
- If `file_path` is a directory, all of the files and subdirectories will - be tagged recursively. - Arguments: - file_path {str} -- Absolute file path to tag. - tag {str} -- Code set to tag the file/directory. - Raises: - TaggingError: When the chtag command fails. - """ - is_dir = os.path.isdir(file_path) - - tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) - rc, out, err = self.module.run_command(tag_cmd) - if rc != 0: - raise TaggingError(file_path, tag, rc, out, err) + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) def uss_file_tag(self, file_path): """Returns the current tag set for a file. 
From cf793d900c5ae2a999a64f63fe8ba67acac3ab2b Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 3 Apr 2023 12:06:19 -0400 Subject: [PATCH 065/413] removed extraneous comment on query, eliminated unused variable --- plugins/module_utils/job.py | 2 -- plugins/modules/zos_job_query.py | 8 +------- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 00ec0407f..c870573a6 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -208,8 +208,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= if owner != entry.owner: continue if job_name != "*": - # if job_name != entry.name: - # continue if not fnmatch.fnmatch(entry.name, job_name): continue if job_id_temp is not None: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 22968bc62..56646055a 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -227,7 +227,6 @@ def validate_arguments(params): job_name_final = job_name_in job_id = params.get("job_id") - job_id_final = job_id owner = params.get("owner") if job_name_in or job_id: @@ -251,8 +250,6 @@ def validate_arguments(params): o = job_name_pattern.search(job_name_short) if not o: o = job_name_pattern_with_star.search(job_name_short) - if o: - job_name_final = job_name_short # so now, fail if neither m, n, or o=m/n(short) found a match if not o: @@ -272,9 +269,6 @@ def validate_arguments(params): if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: o = job_id_short - if o: - job_id_final = job_id_short + '*' - if not m and not o: raise RuntimeError("Failed to validate the job id: " + job_id) else: @@ -282,7 +276,7 @@ def validate_arguments(params): if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") - # return job_name_final, job_id_final, owner; + # return job_name_final, id, 
owner; return job_name_in, job_id, owner From 67448dbe8a93e3f2bc3a62c5e84cd6d7868158c5 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 3 Apr 2023 13:40:22 -0400 Subject: [PATCH 066/413] responding to reviewer comments --- ...os-job-query-handle-multiple-wildcards.yml | 4 +-- plugins/modules/zos_job_query.py | 33 +++++++++---------- .../modules/test_zos_job_query_func.py | 11 ++++++- 3 files changed, 28 insertions(+), 20 deletions(-) diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml index ae2871b9f..a35827e24 100644 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -1,4 +1,4 @@ -enhancements: +minor_changes: - zos_job_query - This enhancement adjusts the job_name and job_id parameters to handle embedded wildcards. This also required change to job.py/_get_job_status to follow the wildcard feature, using fnmatch logic. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/---) + (https://github.com/ansible-collections/ibm_zos_core/pull/721) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 56646055a..646032c5a 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -224,7 +224,6 @@ def run_module(): # validate_arguments rturns a tuple, so we don't have to rebuild the job_name string def validate_arguments(params): job_name_in = params.get("job_name") - job_name_final = job_name_in job_id = params.get("job_id") @@ -235,48 +234,48 @@ def validate_arguments(params): job_name_pattern_with_star = re.compile( r"^[a-zA-Z$#@%][0-9a-zA-Z$#@%]{0,6}\*$" ) - m = job_name_pattern.search(job_name_in) - n = job_name_pattern_with_star.search(job_name_in) + test_basic = job_name_pattern.search(job_name_in) + test_star = job_name_pattern_with_star.search(job_name_in) # logic twist: o must be non-null value from m or n - o = m - if n: - o = n + test_result = test_basic + if test_star: + test_result = test_star + job_name_short = "unused" # if neither m nor n were non-null, check if the string needed to be truncated to the first * - if not o: + if not test_result: ix = job_name_in.find("*") if ix >= 0: job_name_short = job_name_in[0:ix + 1] - o = job_name_pattern.search(job_name_short) - if not o: - o = job_name_pattern_with_star.search(job_name_short) + test_result = job_name_pattern.search(job_name_short) + if not test_result: + test_result = job_name_pattern_with_star.search(job_name_short) # so now, fail if neither m, n, or o=m/n(short) found a match - if not o: + if not test_result: raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") - m = job_id_pattern.search(job_id) - o = None + test_basic = job_id_pattern.search(job_id) + test_result = None - if not m: + if not 
test_basic: ix = job_id.find("*") if ix > 0: # this differs from job_name, in that we'll drop the star for the search job_id_short = job_id[0:ix] if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: - o = job_id_short + test_result = job_id_short - if not m and not o: + if not test_basic and not test_result: raise RuntimeError("Failed to validate the job id: " + job_id) else: raise RuntimeError("Argument Error:Either job name(s) or job id is required") if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") - # return job_name_final, id, owner; return job_name_in, job_id, owner diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 32914731c..947b79c70 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -35,7 +35,16 @@ def test_zos_job_query_func(ansible_zos_module): assert result.get("jobs") is not None -# test to show multi wildcard won't crash the search +# test to show multi wildcard in Job_id query won't crash the search +def test_zos_job_query_multi_wildcards_func(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_job_query(job_id="STC*3*") + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs") is not None + +# test to show multi wildcard in Job_name query won't crash the search def test_zos_job_query_multi_wildcards_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="JOB*1*", owner="*") From 9b5f063d4c62eca730837419c2141f2bebf8b32c Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 5 Apr 2023 16:04:50 -0400 Subject: [PATCH 067/413] Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. 
--- plugins/modules/zos_job_query.py | 6 +- .../modules/test_zos_job_query_func.py | 95 +++++++++++++++---- 2 files changed, 80 insertions(+), 21 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 646032c5a..c7758da33 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -236,13 +236,13 @@ def validate_arguments(params): ) test_basic = job_name_pattern.search(job_name_in) test_star = job_name_pattern_with_star.search(job_name_in) - # logic twist: o must be non-null value from m or n + # logic twist: test_result should be a non-null value from test_basic or test_star test_result = test_basic if test_star: test_result = test_star job_name_short = "unused" - # if neither m nor n were non-null, check if the string needed to be truncated to the first * + # if neither test_basic nor test_star were non-null, check if the string needed to be truncated to the first * if not test_result: ix = job_name_in.find("*") if ix >= 0: @@ -251,7 +251,7 @@ def validate_arguments(params): if not test_result: test_result = job_name_pattern_with_star.search(job_name_short) - # so now, fail if neither m, n, or o=m/n(short) found a match + # so now, fail if neither test_basic, test_star or test_base from job_name_short found a match if not test_result: raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 947b79c70..f0e53a556 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -15,17 +15,16 @@ __metaclass__ = type -import os -import sys -import warnings - import ansible.constants import ansible.errors import ansible.utils import pytest from pprint import pprint +from shellescape import quote +import tempfile +# Make sure job list * returns 
something def test_zos_job_query_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="*", owner="*") @@ -34,21 +33,81 @@ def test_zos_job_query_func(ansible_zos_module): assert result.get("changed") is False assert result.get("jobs") is not None +JCLQ_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD +/* +//SYSUT2 DD SYSOUT=* +// +""" + +TEMP_PATH = "/tmp/jcl" +JDATA_SET_NAME = "imstestl.ims1.testq1" +NDATA_SET_NAME = "imstestl.ims1.testq2" +DEFAULT_VOLUME = "000000" # test to show multi wildcard in Job_id query won't crash the search -def test_zos_job_query_multi_wildcards_func(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_job_query(job_id="STC*3*") - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("jobs") is not None +def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) + ) + hosts.all.zos_data_set( + name=JDATA_SET_NAME, state="present", type="pds", replace=True + ) + hosts.all.shell( + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) + ) + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait=True + ) + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + fulljobid = result.get("jobs")[0].get("res_code").get("job_id") + jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' + qresults = hosts.all.zos_job_query(jobmask) + for qresult in 
qresults.contacted.values(): + assert qresult.get("jobs") is not None + + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=JDATA_SET_NAME, state="absent") + # test to show multi wildcard in Job_name query won't crash the search -def test_zos_job_query_multi_wildcards_func(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_job_query(job_name="JOB*1*", owner="*") - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("jobs") is not None \ No newline at end of file +def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) + ) + hosts.all.zos_data_set( + name=NDATA_SET_NAME, state="present", type="pds", replace=True + ) + hosts.all.shell( + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) + ) + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait=True + ) + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + jobname = "HE*L*" + qresults = hosts.all.zos_job_query(job_name=jobname, owner="*") + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None + + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=NDATA_SET_NAME, state="absent") From 8c716d32d27fe6fa94de2f073c7ddb3577a4d6cb Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 5 Apr 2023 16:23:52 -0400 Subject: [PATCH 068/413] Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. 
--- plugins/modules/zos_job_query.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index c7758da33..64e3ad09b 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -35,6 +35,7 @@ job_name: description: - The job name to query. + - Job name can now contain multiple, embedded asterisks (e.g.: JC*NAM*) type: str required: False default: "*" @@ -51,6 +52,7 @@ with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with S, J, T and are followed by 7 digits. + - Job id can now contain multiple, embedded asterisks (e.g.: JOB*14*) type: str required: False """ @@ -64,6 +66,14 @@ zos_job_query: job_name: "IYK3*" +- name: list the jobs that match 'IYKsomethingNAsomething' + zos_job_query: + job_name: "IYK*NA*" + +- name: list the jobs with JOB in the x014x range only + zos_job_query: + job_idname: JOB*014* + - name: list the job with a jobname 'IYK3ZNA*' and jobid as JOB01427 zos_job_query: job_name: IYK3ZNA* @@ -253,7 +263,7 @@ def validate_arguments(params): # so now, fail if neither test_basic, test_star or test_base from job_name_short found a match if not test_result: - raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) + raise RuntimeError("Unable to locate job name {0}.".format(job_name_in)) if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") From d5fc637474fac83f5b7669ce39483ab7aca74be9 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 09:55:04 -0400 Subject: [PATCH 069/413] Corrected 2 documentation errors --- plugins/modules/zos_job_query.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 64e3ad09b..ae6c5a9ac 100644 --- 
a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -34,8 +34,8 @@ options: job_name: description: - - The job name to query. - - Job name can now contain multiple, embedded asterisks (e.g.: JC*NAM*) + - The job name to query. Job name can now contain multiple, + embedded asterisks (e.g.: JC*NAM*) type: str required: False default: "*" @@ -51,8 +51,8 @@ - The job number that has been assigned to the job. These normally begin with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with - S, J, T and are followed by 7 digits. - - Job id can now contain multiple, embedded asterisks (e.g.: JOB*14*) + S, J, T and are followed by 7 digits. Job id can now contain multiple, + embedded asterisks (e.g.: JOB*14*) type: str required: False """ From 5f56158619ef4e774eb465a761eb3aa9917a8214 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 10:04:55 -0400 Subject: [PATCH 070/413] Change to documentation text (indent on multi line string?) --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index ae6c5a9ac..2a149cb46 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -35,7 +35,7 @@ job_name: description: - The job name to query. 
Job name can now contain multiple, - embedded asterisks (e.g.: JC*NAM*) + embedded asterisks (e.g.: JC*NAM*) type: str required: False default: "*" From baaabe874700ad29e1e889c552df3978b7d953ed Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 10:14:47 -0400 Subject: [PATCH 071/413] Still trying to get documentation to pass --- plugins/modules/zos_job_query.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 2a149cb46..7b383d668 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -34,8 +34,7 @@ options: job_name: description: - - The job name to query. Job name can now contain multiple, - embedded asterisks (e.g.: JC*NAM*) + - The job name to query. Job name can now contain multiple embedded asterisks. type: str required: False default: "*" @@ -52,7 +51,7 @@ with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with S, J, T and are followed by 7 digits. Job id can now contain multiple, - embedded asterisks (e.g.: JOB*14*) + embedded asterisks. type: str required: False """ From 5e6cc4c3c6fcb2e1f3387f532808ed93749b264a Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 10:24:06 -0400 Subject: [PATCH 072/413] Looks like '---' was killing documentation block. 
--- plugins/modules/zos_job_query.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 7b383d668..3870440d8 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -18,7 +18,6 @@ DOCUMENTATION = r""" ---- module: zos_job_query version_added: '1.0.0' short_description: Query job status From 165863093ed66b94cff987f35320e5fee16a7028 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 7 Apr 2023 10:20:41 -0700 Subject: [PATCH 073/413] Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../727-zos-blockinfile-examples.yml | 5 +++ docs/source/modules/zos_blockinfile.rst | 38 +++++++++++++++++-- plugins/modules/zos_blockinfile.py | 38 +++++++++++++++++-- 3 files changed, 75 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/727-zos-blockinfile-examples.yml diff --git a/changelogs/fragments/727-zos-blockinfile-examples.yml b/changelogs/fragments/727-zos-blockinfile-examples.yml new file mode 100644 index 000000000..f1c94c12b --- /dev/null +++ b/changelogs/fragments/727-zos-blockinfile-examples.yml @@ -0,0 +1,5 @@ +trivial: +- zos_blockinfile - was missing examples using Jinja2 and files. This change + adds a Jinja2 example in both the src and block content. It also includes + an example using a file as source. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/727) \ No newline at end of file diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 6e6aae737..5608a0ebb 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -38,7 +38,9 @@ src state - Whether the block should be inserted/replaced (present) or removed (absent). + Whether the block should be inserted or replaced using *state=present*. + + Whether the block should be removed using *state=absent*. | **required**: False | **type**: str @@ -165,7 +167,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The ``-f`` option enables sharing of data sets through the disposition *DISP=SHR*. + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. | **required**: False | **type**: bool @@ -244,6 +246,36 @@ Examples LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 + - name: Set facts for the following two tasks. + set_fact: + HLQ: 'ANSIBLE' + MLQ: 'MEMBER' + LLQ: 'TEST' + MEM: '(JCL)' + MSG: 'your first JCL program' + CONTENT: "{{ lookup('file', 'files/content.txt') }}" + + - name: Update JCL in a PDS member with Jinja2 variable syntax. + zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "HELLO, WORLD" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 1" + marker_end: "End Ansible Block Insertion 1" + state: present + block: | + This is {{ MSG }}, and its now + managed by Ansible. + + - name: Update JCL in PDS member with content from a file. 
+ zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "End Ansible Block Insertion 1" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 2" + marker_end: "End Ansible Block Insertion 2" + block: "{{ CONTENT }}" + @@ -257,7 +289,7 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - When using 'with_*' loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. + When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. When more then one block should be handled in a file you must change the *marker* per task. diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index b7bda8211..9beceab68 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -22,6 +22,7 @@ version_added: '1.3.0' author: - "Behnam (@balkajbaf)" + - "Demetrios Dimatos (@ddimatos)" short_description: Manage block of multi-line textual data on z/OS description: - Manage block of multi-lines in z/OS UNIX System Services (USS) files, @@ -42,7 +43,8 @@ required: true state: description: - - Whether the block should be inserted/replaced (present) or removed (absent). + - Whether the block should be inserted or replaced using I(state=present). + - Whether the block should be removed using I(state=absent). type: str choices: - absent @@ -156,7 +158,7 @@ updated by others. - This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - - The C(-f) option enables sharing of data sets through the disposition + - The C(force) option enables sharing of data sets through the disposition I(DISP=SHR). required: false type: bool @@ -179,7 +181,7 @@ data sets. 
- For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - When using 'with_*' loops be aware that if you do not set a unique mark + - When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. - When more then one block should be handled in a file you must change the I(marker) per task. @@ -245,6 +247,36 @@ RUN PROGRAM(DSNTEP2) PLAN(DSNTEP12) - LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 + +- name: Set facts for the following two tasks. + set_fact: + HLQ: 'ANSIBLE' + MLQ: 'MEMBER' + LLQ: 'TEST' + MEM: '(JCL)' + MSG: 'your first JCL program' + CONTENT: "{{ lookup('file', 'files/content.txt') }}" + +- name: Update JCL in a PDS member with Jinja2 variable syntax. + zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "HELLO, WORLD" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 1" + marker_end: "End Ansible Block Insertion 1" + state: present + block: | + This is {{ MSG }}, and its now + managed by Ansible. + +- name: Update JCL in PDS member with content from a file. 
+ zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "End Ansible Block Insertion 1" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 2" + marker_end: "End Ansible Block Insertion 2" + block: "{{ CONTENT }}" ''' RETURN = r""" From 1123f97b0d2c7e39edf8d039f024fe1dd86a74c5 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 7 Apr 2023 12:58:30 -0700 Subject: [PATCH 074/413] Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/729-zos_operator-example-added.yml | 4 ++++ docs/source/modules/zos_operator.rst | 4 ++++ plugins/modules/zos_operator.py | 4 ++++ 3 files changed, 12 insertions(+) create mode 100644 changelogs/fragments/729-zos_operator-example-added.yml diff --git a/changelogs/fragments/729-zos_operator-example-added.yml b/changelogs/fragments/729-zos_operator-example-added.yml new file mode 100644 index 000000000..46cb6ab84 --- /dev/null +++ b/changelogs/fragments/729-zos_operator-example-added.yml @@ -0,0 +1,4 @@ +trivial: +- zos_operator - had a need for more command examples. This change adds the + D SYMBOLS example. + (https://github.com/ansible-collections/ibm_zos_core/pull/730) \ No newline at end of file diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 868c78a10..b05b0331a 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -101,6 +101,10 @@ Examples cmd: 'd a,all' wait_time_s: 7 + - name: Display the system symbols and associated substitution texts. 
+ zos_operator: + cmd: 'D SYMBOLS' + diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 56f2170c5..a0f66c302 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -90,6 +90,10 @@ zos_operator: cmd: 'd a,all' wait_time_s: 7 + +- name: Display the system symbols and associated substitution texts. + zos_operator: + cmd: 'D SYMBOLS' """ RETURN = r""" From 198476984fe2297ac15aeaf3d64d0ad11079512a Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Fri, 7 Apr 2023 14:37:03 -0700 Subject: [PATCH 075/413] zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../663-zos_gather_facts-update-docstring.yml | 2 ++ docs/source/modules/zos_gather_facts.rst | 24 +++++++++++++++++++ plugins/modules/zos_gather_facts.py | 19 +++++++++++++++ 3 files changed, 45 insertions(+) create mode 100644 changelogs/fragments/663-zos_gather_facts-update-docstring.yml diff --git a/changelogs/fragments/663-zos_gather_facts-update-docstring.yml b/changelogs/fragments/663-zos_gather_facts-update-docstring.yml new file mode 100644 index 000000000..d6ba48dd7 --- /dev/null +++ b/changelogs/fragments/663-zos_gather_facts-update-docstring.yml @@ -0,0 +1,2 @@ +trivial: +- zos_gather_facts - add sample output to RETURN docstring. 
(https://github.com/ansible-collections/ibm_zos_core/pull/722) \ No newline at end of file diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 836421256..63bd22701 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -96,4 +96,28 @@ ansible_facts | **returned**: when collected | **type**: dict + | **sample**: + + .. code-block:: json + + [ + { + "ansible_facts": { + "arch_level": "2", + "hw_name": "SYSZD6", + "ipl_volume": "RES820", + "lpar_name": "SVLLAB01", + "primary_jes": "JES2", + "product_mod_level": "00", + "product_name": "z/OS", + "product_owner": "IBM CORP", + "product_release": "05", + "product_version": "02", + "smf_name": "3090", + "sys_name": "EC33018A", + "sysplex_name": "SVPLEX1", + "vm_name": "EC33018A" + } + } + ] diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index e18dcb288..beff12cd2 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -88,6 +88,25 @@ description: Collection of facts that are gathered from the z/OS systems. 
returned: when collected type: dict + sample: + [ + "ansible_facts": { + "arch_level": "2", + "hw_name": "SYSZD6", + "ipl_volume": "RES820", + "lpar_name": "SVLLAB01", + "primary_jes": "JES2", + "product_mod_level": "00", + "product_name": "z/OS", + "product_owner": "IBM CORP", + "product_release": "05", + "product_version": "02", + "smf_name": "3090", + "sys_name": "EC33018A", + "sysplex_name": "SVPLEX1", + "vm_name": "EC33018A" + } + ] """ from fnmatch import fnmatch From 455c9c099e1e3fda6ced3dc84a387a61e1aa2796 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 7 Apr 2023 19:38:42 -0400 Subject: [PATCH 076/413] 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. --- .../574-zos_find_stoppedonnotfound.yml | 4 +++ plugins/modules/zos_find.py | 3 ++- .../functional/modules/test_zos_find_func.py | 26 +++++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/574-zos_find_stoppedonnotfound.yml diff --git a/changelogs/fragments/574-zos_find_stoppedonnotfound.yml b/changelogs/fragments/574-zos_find_stoppedonnotfound.yml new file mode 100644 index 000000000..48eebe523 --- /dev/null +++ b/changelogs/fragments/574-zos_find_stoppedonnotfound.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_find - fixes a bug where find result values stopped being returned after + first value in a list was 'not found'. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/668) diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index c290657ac..b49d65f04 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -336,7 +336,8 @@ def data_set_filter(module, pds_paths, patterns): rc, out, err = _dls_wrapper(pattern, list_details=True) if rc != 0: if "BGYSC1103E" in err: - return filtered_data_sets + # return filtered_data_sets + continue module.fail_json( msg="Non-zero return code received while executing ZOAU shell command 'dls'", diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 7349b134f..04dfb7368 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -312,3 +312,29 @@ def test_find_non_existent_data_set_members(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 + + +def test_find_mixed_members_from_pds_paths(ansible_zos_module): + hosts = ansible_zos_module + try: + hosts.all.zos_data_set( + batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] + ) + hosts.all.zos_data_set( + batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES] + ) + hosts.all.zos_data_set( + batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES] + ) + find_res = hosts.all.zos_find( + pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] + ) + print(vars(find_res)) + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) == 3 + for ds in val.get('data_sets'): + assert len(ds.get('members')) == 1 + finally: + hosts.all.zos_data_set( + batch=[dict(name=i, state='absent') for i in PDS_NAMES] + ) From 413461fd953ef14456046954552bbbcc4a7a8afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= 
<68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 7 Apr 2023 18:00:02 -0600 Subject: [PATCH 077/413] zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> --- ...can-quotes-in-content-can-be-supported.yml | 5 +++ plugins/modules/zos_blockinfile.py | 24 ++++++++++--- .../modules/test_zos_blockinfile_func.py | 35 +++++++++++++++++++ 3 files changed, 60 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml diff --git a/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml b/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml new file mode 100644 index 000000000..ebd99af7a --- /dev/null +++ b/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml @@ -0,0 +1,5 @@ +bugfixes: +- zos_blockinfile - was unable to use double quotes which prevented some use + cases and did not display an approriate message. The fix now allows for + double quotes to be used with the module. + (https://github.com/ansible-collections/ibm_zos_core/pull/680) \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 9beceab68..c9e504740 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -248,6 +248,14 @@ LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 +- name: Update a script with commands containing quotes. 
+ zos_blockinfile: + src: "/u/scripts/script.sh" + insertafter: "EOF" + block: | + cat "//'{{ DS_NAME }}'" + cat "//'{{ DS_NAME_2 }}'" + - name: Set facts for the following two tasks. set_fact: HLQ: 'ANSIBLE' @@ -414,6 +422,12 @@ def quotedString(string): return string.replace('"', "") +def quoted_string_output_json(string): + if not isinstance(string, str): + return string + return string.replace('"', "u'") + + def main(): module = AnsibleModule( argument_spec=dict( @@ -570,7 +584,7 @@ def main(): # state=present, insert/replace a block with matching regex pattern # state=absent, delete blocks with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, quotedString(block), quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force) + return_content = present(src, block, quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force) else: return_content = absent(src, quotedString(marker), encoding, force) stdout = return_content.stdout_response @@ -584,13 +598,15 @@ def main(): stdout = stdout.replace('$ a\\', '$ a\\\\') stdout = stdout.replace('1 i\\', '1 i\\\\') if block: - stdout = stdout.replace(block, quotedString(block)) + stdout = stdout.replace(block, quoted_string_output_json(block)) if ins_aft: - stdout = stdout.replace(ins_aft, quotedString(ins_aft)) + stdout = stdout.replace(ins_aft, quoted_string_output_json(ins_aft)) if ins_bef: - stdout = stdout.replace(ins_bef, quotedString(ins_bef)) + stdout = stdout.replace(ins_bef, quoted_string_output_json(ins_bef)) # Try to extract information from stdout ret = json.loads(stdout) + ret['cmd'] = ret['cmd'].replace("u'", '"') + result['cmd'] = ret['cmd'] result['changed'] = ret['changed'] result['found'] = ret['found'] diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 5e29674e4..37f1818d4 100644 --- 
a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -133,6 +133,16 @@ export PYTHON_HOME export _BPXK_AUTOCVT""" +TEST_CONTENT_DOUBLEQUOTES = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//USSCMD EXEC PGM=BPXBATCH +//STDERR DD SYSOUT=* +//STDOUT DD SYSOUT=* +//STDPARM DD * +SH ls -la /; +sleep 30; +/* +//""" + # supported data set types # DS_TYPE = ['SEQ', 'PDS', 'PDSE'] DS_TYPE = ['SEQ'] @@ -204,6 +214,9 @@ test_uss_block_insert_with_indentation_level_specified=dict( insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16), + test_uss_block_insert_with_doublequotes=dict( + insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', + marker="// {mark} ANSIBLE MANAGED BLOCK",state="present"), test_ds_block_insertafter_regex=dict(test_name="T1"), test_ds_block_insertbefore_regex=dict(test_name="T2"), test_ds_block_insertafter_eof=dict(test_name="T3"), @@ -264,6 +277,19 @@ export PKG_CONFIG_PATH export PYTHON_HOME export _BPXK_AUTOCVT""", + test_uss_block_insert_with_doublequotes="""//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//USSCMD EXEC PGM=BPXBATCH +//STDERR DD SYSOUT=* +//STDOUT DD SYSOUT=* +//STDPARM DD * +SH ls -la /; +sleep 30; +// BEGIN ANSIBLE MANAGED BLOCK +cat "//OMVSADMI.CAT" +cat "//OMVSADM.COPYMEM.TESTS" > test.txt +// END ANSIBLE MANAGED BLOCK +/* +//""", test_uss_block_insertbefore_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -1174,6 +1200,15 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): TEST_INFO["expected"]["test_uss_block_insert_with_indentation_level_specified"]) +@pytest.mark.uss +def test_uss_block_insert_with_doublequotes(ansible_zos_module): + TEST_ENV["TEST_CONT"] = TEST_CONTENT_DOUBLEQUOTES + UssGeneral( + "test_uss_block_insert_with_doublequotes", 
ansible_zos_module,TEST_ENV, + TEST_INFO["test_uss_block_insert_with_doublequotes"], + TEST_INFO["expected"]["test_uss_block_insert_with_doublequotes"]) + TEST_ENV["TEST_CONT"] = TEST_CONTENT + @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): try: From d361802aa8f7a97c25d61682c5d11c2a91656783 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sat, 8 Apr 2023 00:56:59 -0600 Subject: [PATCH 078/413] zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> --- ...es-is-applied-to-destination-directory.yml | 3 +++ plugins/module_utils/encode.py | 19 +++++++++++++++++++ plugins/modules/zos_blockinfile.py | 12 ------------ plugins/modules/zos_copy.py | 4 ++-- .../modules/test_zos_blockinfile_func.py | 2 +- 5 files changed, 25 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml diff --git a/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml b/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml new file mode 100644 index 000000000..970741107 --- /dev/null +++ 
b/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml @@ -0,0 +1,3 @@ +minor_changes: +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. + (https://github.com/ansible-collections/ibm_zos_core/pull/723) \ No newline at end of file diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index cfcfd2bf0..fa84c6fb3 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -496,6 +496,25 @@ def uss_file_tag(self, file_path): except Exception: return None + def uss_tag_encoding(self, file_path, tag): + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) + class EncodeError(Exception): def __init__(self, message): diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index c9e504740..014382f1e 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -197,13 +197,11 @@ block: | MOUNT FILESYSTEM('SOME.DATA.SET') TYPE(ZFS) MODE(READ) MOUNTPOINT('/tmp/src/somedirectory') - - name: Remove a library as well as surrounding markers zos_blockinfile: state: absent src: SYS1.PARMLIB(PROG00) marker: "/* {mark} ANSIBLE MANAGED BLOCK FOR SOME.DATA.SET */" - - name: Add ZOAU path to PATH in /etc/profile zos_blockinfile: src: /etc/profile @@ -212,7 +210,6 @@ ZOAU=/path/to/zoau_dir/bin export ZOAU PATH=$ZOAU:$PATH - - name: Insert/Update HTML surrounded by 
custom markers after <body> line zos_blockinfile: path: /var/www/html/index.html @@ -221,13 +218,11 @@ block: | <h1>Welcome to {{ ansible_hostname }}</h1> <p>Last updated on {{ ansible_date_time.iso8601 }}</p> - - name: Remove HTML as well as surrounding markers zos_blockinfile: path: /var/www/html/index.html state: absent marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->" - - name: Add mappings to /etc/hosts zos_blockinfile: path: /etc/hosts @@ -238,7 +233,6 @@ - { name: host1, ip: 10.10.1.10 } - { name: host2, ip: 10.10.1.11 } - { name: host3, ip: 10.10.1.12 } - - name: Add a code block to a member using a predefined indentation. zos_blockinfile: path: SYS1.PARMLIB(BPXPRM00) @@ -348,12 +342,10 @@ def transformBlock(block, indentation_char, indentation_spaces): """Prepends the specified number of spaces to the block in all lines - Arguments: block: {str} -- The block text to be transformed. indentation_char: {str} -- The indentation char to be used. indentation_spaces: {int} -- Number of times the indentation char to prepend. - Returns: block: {str} -- The text block after applying the necessary transformations. """ @@ -372,7 +364,6 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): """Replace a block with the matching regex pattern Insert a block before/after the matching pattern Insert a block at BOF/EOF - Arguments: src: {str} -- The z/OS USS file or data set to modify. block: {str} -- The block to insert/replace into the src. @@ -387,7 +378,6 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): - '*regex*' encoding: {str} -- Encoding of the src. force: {str} -- If not empty passes the -f option to dmod cmd. - Returns: str -- Information in JSON format. 
keys: cmd: {str} -- dmod shell command @@ -399,13 +389,11 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): def absent(src, marker, encoding, force): """Delete blocks with matching regex pattern - Arguments: src: {str} -- The z/OS USS file or data set to modify. marker: {str} -- Identifies the block to be removed. encoding: {str} -- Encoding of the src. force: {str} -- If not empty passes the -f option to dmod cmd. - Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 57a16545e..2fe9ffd4c 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1056,8 +1056,8 @@ def copy_to_uss( group = self.common_file_args.get("group") owner = self.common_file_args.get("owner") if mode is not None: - self.module.set_mode_if_different(dest, mode, False) - + if not os.path.isdir(dest): + self.module.set_mode_if_different(dest, mode, False) if changed_files: for filepath in changed_files: self.module.set_mode_if_different(os.path.join(dest, filepath), mode, False) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 37f1818d4..f6b735487 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1545,4 +1545,4 @@ def test_ds_not_supported(ansible_zos_module, dstype): DsNotSupportedHelper( TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], ansible_zos_module, TEST_ENV, TEST_INFO["test_uss_block_insertafter_regex"] - ) + ) \ No newline at end of file From 2df8bfe407d0da5ba6e6879fe795fef2e24d2291 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Sun, 9 Apr 2023 23:23:51 -0400 Subject: [PATCH 079/413] corrected job test case that wanted to extract job id. 
--- tests/functional/modules/test_zos_job_query_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index f0e53a556..3386467b5 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -71,7 +71,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 - fulljobid = result.get("jobs")[0].get("res_code").get("job_id") + fulljobid = result.get("jobs")[0].get("job_id") jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' qresults = hosts.all.zos_job_query(jobmask) for qresult in qresults.contacted.values(): From 18126332afb9f99c2af44b35285fd1977d068ba5 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 10 Apr 2023 11:30:56 -0400 Subject: [PATCH 080/413] changed call to zos_job_query in the functional test. 
--- tests/functional/modules/test_zos_job_query_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 3386467b5..0231cc874 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -73,7 +73,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): fulljobid = result.get("jobs")[0].get("job_id") jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' - qresults = hosts.all.zos_job_query(jobmask) + qresults = hosts.all.zos_job_query(job_id=jobmask) for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None From 4f4c2644c31cfd178a032336f1e730a0e181dc4a Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Mon, 10 Apr 2023 14:11:21 -0700 Subject: [PATCH 081/413] zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...8-zos-data-set-support-disposition-shr.yml | 2 + plugins/module_utils/data_set.py | 19 ++- plugins/modules/zos_data_set.py | 71 ++++++++- .../modules/test_zos_data_set_func.py | 140 ++++++++++++++++++ 4 files changed, 226 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/358-zos-data-set-support-disposition-shr.yml diff --git a/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml 
b/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml new file mode 100644 index 000000000..4102bab0d --- /dev/null +++ b/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml @@ -0,0 +1,2 @@ +minor_changes: + - zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 2549c345c..8295a6541 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -116,6 +116,7 @@ def ensure_present( sms_management_class=None, volumes=None, tmp_hlq=None, + force=None, ): """Creates data set if it does not already exist. @@ -171,6 +172,8 @@ def ensure_present( has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. tmp_hlq (str, optional): High level qualifier for temporary datasets. + force (bool, optional): Used to determine behavior when performing member operations on a pdse. + Defaults to None. Returns: bool -- Indicates if changes were made. @@ -247,11 +250,11 @@ def ensure_member_present(name, replace=False): return True @staticmethod - def ensure_member_absent(name): + def ensure_member_absent(name, force=False): """Deletes provided data set member if it exists. Returns a boolean indicating if changes were made.""" if DataSet.data_set_member_exists(name): - DataSet.delete_member(name) + DataSet.delete_member(name, force) return True return False @@ -772,6 +775,7 @@ def replace( sms_management_class=None, volumes=None, tmp_hlq=None, + force=None, ): """Attempts to replace an existing data set. 
@@ -826,6 +830,8 @@ def replace( has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. tmp_hlq (str, optional): High level qualifier for temporary datasets. + force (bool, optional): Used to determine behavior when performing member operations on a pdse. + Defaults to None. """ arguments = locals() DataSet.delete(name) @@ -884,6 +890,7 @@ def create( sms_management_class=None, volumes=None, tmp_hlq=None, + force=None, ): """A wrapper around zoautil_py Dataset.create() to raise exceptions on failure. @@ -940,6 +947,8 @@ def create( has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. tmp_hlq (str, optional): High level qualifier for temporary datasets. + force (bool, optional): Used to determine behavior when performing member operations on a pdse. + Defaults to None. Raises: DatasetCreateError: When data set creation fails. """ @@ -992,7 +1001,7 @@ def create_member(name): raise DatasetMemberCreateError(name, rc) @staticmethod - def delete_member(name): + def delete_member(name, force=False): """A wrapper around zoautil_py Dataset.delete_members() to raise exceptions on failure. @@ -1002,7 +1011,7 @@ def delete_member(name): Raises: DatasetMemberDeleteError: When data set member deletion fails. """ - rc = datasets.delete_members(name) + rc = datasets.delete_members(name, force=force) if rc > 0: raise DatasetMemberDeleteError(name, rc) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index c3a6936d7..3e7ee1700 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -43,6 +43,10 @@ - > If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). 
+ - > + If I(state=absent) and I(type=MEMBER) and I(force=True), the data set + will be opened with I(DISP=SHR) such that the entire data set can be + accessed by other processes while the specified member is deleted. - > If I(state=absent) and I(volumes) is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied @@ -247,6 +251,20 @@ that is not available, then the value C(TMPHLQ) is used. required: false type: str + force: + description: + - Specifies that the data set can be shared with others during a member + delete operation which results in the data set you are updating to be + simultaneously updated by others. + - This is helpful when a data set is being used in a long running process + such as a started task and you are wanting to delete a member. + - The I(force=True) option enables sharing of data sets through the + disposition I(DISP=SHR). + - The I(force=True) only applies to data set members when I(state=absent) + and I(type=MEMBER). + type: bool + required: false + default: false batch: description: - Batch can be used to perform operations on multiple data sets in a single module call. @@ -271,6 +289,11 @@ - > If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). + - > + If I(state=absent) and I(type=MEMBER) and I(force=True), the data + set will be opened with I(DISP=SHR) such that the entire data set + can be accessed by other processes while the specified member is + deleted. - > If I(state=absent) and I(volumes) is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied @@ -467,6 +490,21 @@ type: bool required: false default: false + force: + description: + - Specifies that the data set can be shared with others during a member + delete operation which results in the data set you are updating to + be simultaneously updated by others. 
+ - This is helpful when a data set is being used in a long running + process such as a started task and you are wanting to delete a + member. + - The I(force=True) option enables sharing of data sets through the + disposition I(DISP=SHR). + - The I(force=True) only applies to data set members when + I(state=absent) and I(type=MEMBER). + type: bool + required: false + default: false """ EXAMPLES = r""" @@ -552,6 +590,13 @@ state: absent type: MEMBER +- name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR + zos_data_set: + name: someds.name.here(mydata) + state: absent + type: MEMBER + force: yes + - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: @@ -894,6 +939,9 @@ def perform_data_set_operations(name, state, **extra_args): """Calls functions to perform desired operations on one or more data sets. Returns boolean indicating if changes were made.""" changed = False + # passing in **extra_args forced me to modify the acceptable parameters + # for multiple functions in data_set.py including ensure_present, replace + # and create where the force parameter has no bearing. 
if state == "present" and extra_args.get("type") != "MEMBER": changed = DataSet.ensure_present(name, **extra_args) elif state == "present" and extra_args.get("type") == "MEMBER": @@ -901,7 +949,7 @@ def perform_data_set_operations(name, state, **extra_args): elif state == "absent" and extra_args.get("type") != "MEMBER": changed = DataSet.ensure_absent(name, extra_args.get("volumes")) elif state == "absent" and extra_args.get("type") == "MEMBER": - changed = DataSet.ensure_member_absent(name) + changed = DataSet.ensure_member_absent(name, extra_args.get("force")) elif state == "cataloged": changed = DataSet.ensure_cataloged(name, extra_args.get("volumes")) elif state == "uncataloged": @@ -1017,6 +1065,11 @@ def parse_and_validate_args(params): aliases=["volume"], dependencies=["state"], ), + force=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1086,6 +1139,11 @@ def parse_and_validate_args(params): required=False, default=None ), + force=dict( + type="bool", + required=False, + default=False, + ), mutually_exclusive=[ ["batch", "name"], # ["batch", "state"], @@ -1102,6 +1160,7 @@ def parse_and_validate_args(params): ["batch", "key_length"], # ["batch", "replace"], ["batch", "volumes"], + # ["batch", "force"], ], ) parser = BetterArgParser(arg_defs) @@ -1162,6 +1221,11 @@ def run_module(): default=False, ), volumes=dict(type="raw", required=False, aliases=["volume"]), + force=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1213,6 +1277,11 @@ def run_module(): required=False, default=None ), + force=dict( + type="bool", + required=False, + default=False + ), ) result = dict(changed=False, message="", names=[]) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 991ce07ca..37bdcb682 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ 
b/tests/functional/modules/test_zos_data_set_func.py @@ -16,9 +16,12 @@ __metaclass__ = type import pytest +import time +import subprocess from pipes import quote from pprint import pprint + # TODO: determine if data set names need to be more generic for testcases # TODO: add additional tests to check additional data set creation parameter combinations @@ -460,6 +463,143 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +def test_data_member_force_delete(ansible_zos_module): + MEMBER_1, MEMBER_2, MEMBER_3, MEMBER_4 = "MEM1", "MEM2", "MEM3", "MEM4" + try: + hosts = ansible_zos_module + + # set up: + # create pdse + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + for result in results.contacted.values(): + assert result.get("changed") is True + + # add members + results = hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), + "type": "member", + "state": "present", + "replace": 
True, + }, + ] + ) + # ensure data set/members create successful + for result in results.contacted.values(): + assert result.get("changed") is True + + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + # non-force attempt to delete MEMBER_2 - should fail since pdse in in use. + results = hosts.all.zos_data_set( + name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_2), + state="absent", + type="MEMBER" + ) + for result in results.contacted.values(): + assert result.get("failed") is True + assert "DatasetMemberDeleteError" in result.get("msg") + + # attempt to delete MEMBER_3 with force option. + results = hosts.all.zos_data_set( + name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="MEMBER", force=True + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + + # attempt to delete MEMBER_4 with force option in batch mode. 
+ results = hosts.all.zos_data_set( + batch=[ + { + "name": "{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4), + "state": "absent", + "type": "MEMBER", + "force": True + } + ] + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + + # confirm member deleted with mls -- mem1 and mem2 should be present but no mem3 and no mem4 + results = hosts.all.command(cmd="mls {0}".format(DEFAULT_DATA_SET_NAME)) + for result in results.contacted.values(): + assert MEMBER_1 in result.get("stdout") + assert MEMBER_2 in result.get("stdout") + assert MEMBER_3 not in result.get("stdout") + assert MEMBER_4 not in result.get("stdout") + + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + + def test_repeated_operations(ansible_zos_module): try: hosts = ansible_zos_module From 574d0d8a7ad8f5828297b98ed50d6f6f85ad1a80 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 13:00:40 -0700 Subject: [PATCH 082/413] Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- .../323-zos-job-query-handle-multiple-wildcards.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml index a35827e24..060df2fb1 100644 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -1,4 +1,7 @@ minor_changes: - - 
zos_job_query - This enhancement adjusts the job_name and job_id parameters to handle embedded wildcards. - This also required change to job.py/_get_job_status to follow the wildcard feature, using fnmatch logic. - (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_job_query - ansible module does not support positional wild card placement + for `job_name` or `job_id`. This enhancement allows embedded wildcards + throughout the `job_name` and `job_id`. + (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- module_utils - job.py utility did not support positional wild card placement, + this enhancement uses `fnmatch` logic to support wild cards. From ded116a442b06d0648df6dec6423931dbeab6d6b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:22:15 -0700 Subject: [PATCH 083/413] Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_query.py | 50 ++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 3870440d8..18ad27072 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -23,9 +23,10 @@ short_description: Query job status description: - List z/OS job(s) and the current status of the job(s). + - Uses job_name to filter the jobs by the job name. + - Uses job_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. - - Uses job_id to filter the jobs by the job id.
author: - "Ping Xiao (@xiaopingBJ)" - "Demetrios Dimatos (@ddimatos)" @@ -33,7 +34,10 @@ options: job_name: description: - - The job name to query. Job name can now contain multiple embedded asterisks. + - The job name to query. + - A job name can be up to 8 characters long. + - The I(job_name) can include multiple wildcards. + - The asterisk (`*`) wildcard will match zero or more specified characters. type: str required: False default: "*" @@ -46,41 +50,43 @@ required: False job_id: description: - - The job number that has been assigned to the job. These normally begin - with STC, JOB, TSU and are followed by 5 digits. When job are - potentially greater than 99,999, the job number format will begin with - S, J, T and are followed by 7 digits. Job id can now contain multiple, - embedded asterisks. + - The job id that has been assigned to the job. + - A job id must begin with `STC`, `JOB`, `TSU` and are + followed by up to 5 digits. + - When a job id is greater than 99,999, the job id format will begin + with `S`, `J`, `T` and are followed by 7 digits. + - The I(job_id) can include multiple wildcards. + - The asterisk (`*`) wildcard will match zero or more specified characters. type: str required: False """ EXAMPLES = r""" -- name: list zos jobs with a jobname 'IYK3ZNA1' +- name: Query a job with a job name of 'JOB12345' zos_job_query: - job_name: "IYK3ZNA1" + job_name: "JOB12345" -- name: list the jobs matching jobname 'IYK3*' +- name: Query jobs using a wildcard to match any job id beginning with 'JOB12' zos_job_query: - job_name: "IYK3*" + job_id: "JOB12*" -- name: list the jobs that match 'IYKsomethingNAsomething' +- name: Query jobs using wildcards to match any job name beginning with 'H' and ending in 'O'. zos_job_query: - job_name: "IYK*NA*" + job_name: "H*O" -- name: list the jobs with JOB in the x014x range only +- name: Query jobs using wildcards to match a range of job id(s) that include 'JOB' and '014'.
zos_job_query: - job_idname: JOB*014* + job_id: JOB*014* -- name: list the job with a jobname 'IYK3ZNA*' and jobid as JOB01427 +- name: Query all job names beginning wih 'H' that match job id range that include '14'. zos_job_query: - job_name: IYK3ZNA* - job_id: JOB01427 + job_name: "H*" + job_id: "JOB*14*" -- name: list the job with a jobname 'IYK3ZNA*' and owner as BROWNAD +- name: Query all jobs names beginning with 'LINK' for owner 'ADMIN'. zos_job_query: - job_name: IYK3ZNA* - owner: BROWNAD + job_name: "LINK*" + owner: ADMIN """ RETURN = r""" From b9d6be1098ff6b291386fbc6a9d682126207fc76 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:25:28 -0700 Subject: [PATCH 084/413] Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 18ad27072..4ff99a128 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -78,7 +78,7 @@ zos_job_query: job_id: JOB*014* -- name: Query all job names beginning wih 'H' that match job id range that include '14'. +- name: Query all job names beginning wih 'H' that match job id that includes '14'. 
zos_job_query: job_name: "H*" job_id: "JOB*14*" From 28b910473851f6124bc648912a663b87bdb1d43b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:37:53 -0700 Subject: [PATCH 085/413] Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_query.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 4ff99a128..bbd4f0e77 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -177,14 +177,14 @@ sample: [ { - "job_name": "IYK3ZNA1", - "owner": "BROWNAD", + "job_name": "LINKJOB", + "owner": "ADMIN", "job_id": "JOB01427", "ret_code": "null", }, { - "job_name": "IYK3ZNA2", - "owner": "BROWNAD", + "job_name": "LINKCBL", + "owner": "ADMIN", "job_id": "JOB16577", "ret_code": { "msg": "CANCELED", "code": "null" }, }, From a2ca30216a9d82eda22bb3581939e52dbc87441d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:39:18 -0700 Subject: [PATCH 086/413] update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_query.rst | 55 +++++++++++++++++++-------- 1 file changed, 39 insertions(+), 16 deletions(-) diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 76fccad68..1d94f9047 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -17,9 +17,10 @@ zos_job_query -- Query job status Synopsis -------- - List z/OS job(s) and the current status of the job(s). +- Uses job_name to filter the jobs by the job name. +- Uses job_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. -- Uses job_id to filter the jobs by the job id. @@ -32,6 +33,12 @@ Parameters job_name The job name to query. 
+ A job name can be up to 8 characters long. + + The *job_name* can contain include multiple wildcards. + + The asterisk (`*`) wildcard will match zero or more specified characters. + | **required**: False | **type**: str | **default**: * @@ -47,7 +54,15 @@ owner job_id - The job number that has been assigned to the job. These normally begin with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with S, J, T and are followed by 7 digits. + The job id that has been assigned to the job. + + A job id begins must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. + + When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. + + The *job_id* can contain include multiple wildcards. + + The asterisk (`*`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -61,23 +76,31 @@ Examples .. code-block:: yaml+jinja - - name: list zos jobs with a jobname 'IYK3ZNA1' + - name: Query a job with a job name of 'JOB12345' + zos_job_query: + job_name: "JOB12345" + + - name: Query jobs using a wildcard to match any job id begging with 'JOB12' + zos_job_query: + job_id: "JOB12*" + + - name: Query jobs using wildcards to match any job name begging with 'H' and ending in 'O'. zos_job_query: - job_name: "IYK3ZNA1" + job_name: "H*O" - - name: list the jobs matching jobname 'IYK3*' + - name: Query jobs using a wildcards to match a range of job id(s) that include 'JOB' and '014'. zos_job_query: - job_name: "IYK3*" + job_id: JOB*014* - - name: list the job with a jobname 'IYK3ZNA*' and jobid as JOB01427 + - name: Query all job names beginning wih 'H' that match job id that includes '14'. 
zos_job_query: - job_name: IYK3ZNA* - job_id: JOB01427 + job_name: "H*" + job_id: "JOB*14*" - - name: list the job with a jobname 'IYK3ZNA*' and owner as BROWNAD + - name: Query all jobs names beginning with 'LINK' for owner 'ADMIN'. zos_job_query: - job_name: IYK3ZNA* - owner: BROWNAD + job_name: "LINK*" + owner: ADMIN @@ -111,14 +134,14 @@ jobs [ { "job_id": "JOB01427", - "job_name": "IYK3ZNA1", - "owner": "BROWNAD", + "job_name": "LINKJOB", + "owner": "ADMIN", "ret_code": "null" }, { "job_id": "JOB16577", - "job_name": "IYK3ZNA2", - "owner": "BROWNAD", + "job_name": "LINKCBL", + "owner": "ADMIN", "ret_code": { "code": "null", "msg": "CANCELED" From 34ece4c330765f42ae541d9c23b8783e51671bea Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:43:52 -0700 Subject: [PATCH 087/413] Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_query.rst | 6 +++--- plugins/modules/zos_job_query.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 1d94f9047..d33ca6744 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -153,16 +153,16 @@ jobs The name of the batch job. | **type**: str - | **sample**: IYK3ZNA2 + | **sample**: LINKJOB owner The owner who ran the job. | **type**: str - | **sample**: BROWNAD + | **sample**: ADMIN job_id - Unique job id assigned to the job by JES. + Unique job identifier assigned to the job by JES. | **type**: str | **sample**: JOB01427 diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index bbd4f0e77..ed31f0c0d 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -108,15 +108,15 @@ description: The name of the batch job. type: str - sample: IYK3ZNA2 + sample: LINKJOB owner: description: The owner who ran the job. 
type: str - sample: BROWNAD + sample: ADMIN job_id: description: - Unique job id assigned to the job by JES. + Unique job identifier assigned to the job by JES. type: str sample: JOB01427 ret_code: From 092cfd5e8d983ebc7beaf7a7a6a120220469d043 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 11 Apr 2023 18:02:08 -0400 Subject: [PATCH 088/413] Added handler for job not found edge cases (None not iterable errors) --- plugins/modules/zos_job_query.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index bbd4f0e77..efa6305a6 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -313,7 +313,13 @@ def parsing_jobs(jobs_raw): for job in jobs_raw: # Easier to see than checking for an empty string, JOB NOT FOUND was # replaced with None in the jobs.py and msg_txt field describes the job query instead - status_raw = job.get("ret_code").get("msg", "JOB NOT FOUND") + if job.get("ret_code") is None: + status_raw = "JOB NOT FOUNDa" + elif job.get("ret_code").get("msg", "JOB NOT FOUND") is None: + status_raw = "JOB NOT FOUNDb" + else: + status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") + if "AC" in status_raw: # the job is active ret_code = None @@ -332,9 +338,11 @@ def parsing_jobs(jobs_raw): elif "ABENDU" in status_raw: # status = 'Ended abnormally' ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} + elif "CANCELED" in status_raw or "JCLERR" in status_raw or "JCL ERROR" in status_raw or "JOB NOT FOUND" in status_raw: # status = status_raw ret_code = {"msg": status_raw, "code": None} + else: # status = 'Unknown' ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} From a62d76a1a073520b74f71be058137fccb5824c0f Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 11 Apr 2023 18:12:41 -0400 Subject: [PATCH 089/413] corrected pep8 issue (bad indent) --- 
plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index dfce6da0e..7e136a3ba 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -318,7 +318,7 @@ def parsing_jobs(jobs_raw): elif job.get("ret_code").get("msg", "JOB NOT FOUND") is None: status_raw = "JOB NOT FOUNDb" else: - status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") + status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") if "AC" in status_raw: # the job is active From f5de722a0bce4cd7b788ea5cf57687a030559dad Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 11 Apr 2023 18:18:26 -0400 Subject: [PATCH 090/413] removed tracking text from error/not found messages. --- plugins/modules/zos_job_query.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 7e136a3ba..9c2c7dd86 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -314,11 +314,11 @@ def parsing_jobs(jobs_raw): # Easier to see than checking for an empty string, JOB NOT FOUND was # replaced with None in the jobs.py and msg_txt field describes the job query instead if job.get("ret_code") is None: - status_raw = "JOB NOT FOUNDa" + status_raw = "JOB NOT FOUND" elif job.get("ret_code").get("msg", "JOB NOT FOUND") is None: - status_raw = "JOB NOT FOUNDb" + status_raw = "JOB NOT FOUND" else: - status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") + status_raw = job.get("ret_code").get("msg", "JOB NOT FOUND") if "AC" in status_raw: # the job is active From c1126f2e1b52e40752fda28cccaf052ae9f96e7b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 12 Apr 2023 09:52:27 -0600 Subject: [PATCH 091/413] Update zos_job_query.py --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 9c2c7dd86..28d38b727 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -51,7 +51,7 @@ job_id: description: - The job id that has been assigned to the job. - - A job id begins must begin with `STC`, `JOB`, `TSU` and are + - A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. - When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. From 81b35877dcaf76be46680e989803d9d513027c5f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 15:39:25 -0600 Subject: [PATCH 092/413] Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug --- .../fragments/601-copy-loadlib-member.yml | 3 + .../fragments/734-copy-loadlib-member.yml | 3 + .../functional/modules/test_zos_copy_func.py | 229 ++++++++++++++++++ 3 files changed, 235 insertions(+) create mode 100644 changelogs/fragments/601-copy-loadlib-member.yml create mode 100644 changelogs/fragments/734-copy-loadlib-member.yml diff --git a/changelogs/fragments/601-copy-loadlib-member.yml b/changelogs/fragments/601-copy-loadlib-member.yml new file mode 100644 index 000000000..75b59e654 --- /dev/null +++ b/changelogs/fragments/601-copy-loadlib-member.yml @@ -0,0 +1,3 @@ +bugfixes: +- zos_copy - Copy failed from a loadlib 
member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. + (https://github.com/ansible-collections/ibm_zos_core/pull/640) diff --git a/changelogs/fragments/734-copy-loadlib-member.yml b/changelogs/fragments/734-copy-loadlib-member.yml new file mode 100644 index 000000000..ebbaad48c --- /dev/null +++ b/changelogs/fragments/734-copy-loadlib-member.yml @@ -0,0 +1,3 @@ +bugfixes: +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. + (https://github.com/ansible-collections/ibm_zos_core/pull/734) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 884f0e3d6..cfe8e0ee9 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -56,6 +56,58 @@ TEST_PDSE = "SYS1.NFSLIBE" TEST_PDSE_MEMBER = "SYS1.NFSLIBE(GFSAMAIN)" +COBOL_SRC = """ + IDENTIFICATION DIVISION.\n + PROGRAM-ID. 
HELLOWRD.\n +\n + PROCEDURE DIVISION.\n + DISPLAY "SIMPLE HELLO WORLD".\n + STOP RUN.\n +""" + +LINK_JCL = """ +//COMPLINK JOB MSGCLASS=H,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//STEP1 EXEC PGM=IGYCRCTL +//STEPLIB DD DSN=IGYV5R10.SIGYCOMP,DISP=SHR +// DD DSN=IGYV5R10.SIGYMAC,DISP=SHR +//SYSIN DD DISP=SHR,DSN={0} +//SYSPRINT DD SYSOUT=* +//SYSLIN DD UNIT=SYSDA,DISP=(MOD), +// SPACE=(CYL,(1,1)), +// DCB=(RECFM=FB,LRECL=80,BLKSIZE=27920), +// DSN=&&LOADSET +//SYSUT1 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT2 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT3 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT4 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT5 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT6 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT7 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT8 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT9 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT10 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT11 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT12 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT13 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT14 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT15 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSMDECK DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//* +//LKED EXEC PGM=IEWL,REGION=0M +//SYSPRINT DD SYSOUT=* +//SYSLIB DD DSN=CEE.SCEELKED,DISP=SHR +// DD DSN=CEE.SCEELKEX,DISP=SHR +//SYSLMOD DD DSN={1}, +// DISP=SHR +//SYSUT1 DD UNIT=SYSDA,DCB=BLKSIZE=1024, +// SPACE=(TRK,(3,3)) +//SYSTERM DD SYSOUT=* +//SYSPRINT DD SYSOUT=* +//SYSLIN DD DSN=&&LOADSET,DISP=(OLD,KEEP) +//SYSIN DD DUMMY +//* + +""" def populate_dir(dir_path): for i in range(5): @@ -145,6 +197,42 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.file(path=record_src, state="absent") +def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): + """ + Given a PDSE, links a cobol program making allocated in a temp ds resulting in ds_name + as a 
loadlib. + + Arguments: + ds_name (str) -- PDS/E to be linked with the cobol program. + cobol_src (str) -- Cobol source code to be used as the program. + + Notes: PDS names are in the format of SOME.PDSNAME(MEMBER) + """ + # Copy the Link program + temp_jcl = "/tmp/link.jcl" + rc = 0 + try: + cp_res = hosts.all.zos_copy( + content=LINK_JCL.format(cobol_pds, ds_name), + dest="/tmp/link.jcl", + force=True, + ) + for res in cp_res.contacted.values(): + print("copy link program result {0}".format(res)) + # Link the temp ds with ds_name + job_result = hosts.all.zos_job_submit( + src="/tmp/link.jcl", + location="USS", + wait_time_s=60 + ) + for result in job_result.contacted.values(): + print("link job submit result {0}".format(result)) + rc = result.get("jobs")[0].get("ret_code").get("code") + finally: + hosts.all.file(path=temp_jcl, state="absent") + return rc + + @pytest.mark.uss @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), @@ -1679,6 +1767,147 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts.all.zos_data_set(name=dest, state="absent") +@pytest.mark.pdse +def test_copy_pds_member_with_system_symbol(ansible_zos_module,): + """This test is for bug #543 in GitHub. In some versions of ZOAU, + datasets.listing can't handle system symbols in volume names and + therefore fails to get details from a dataset. + """ + hosts = ansible_zos_module + # The volume for this dataset should use a system symbol. + # This dataset and member should be available on any z/OS system. 
+ src = "SYS1.SAMPLIB(IZUPRM00)" + dest = "USER.TEST.PDS.DEST" + + try: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + replace=True + ) + + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) + verify_copy = hosts.all.shell( + cmd="mls {0}".format(dest), + executable=SHELL_EXECUTABLE + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == dest + + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + assert len(stdout.splitlines()) == 1 + + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.pdse +def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): + hosts = ansible_zos_module + # The volume for this dataset should use a system symbol. + # This dataset and member should be available on any z/OS system. + src = "USER.LOAD.SRC" + dest = "USER.LOAD.DEST" + cobol_pds = "USER.COBOL.SRC" + try: + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + hosts.all.zos_data_set( + name=cobol_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + member = "HELLOSRC" + cobol_pds = "{0}({1})".format(cobol_pds, member) + rc = hosts.all.zos_copy( + content=COBOL_SRC, + dest=cobol_pds, + ) + dest_name = "{0}({1})".format(dest, member) + src_name = "{0}({1})".format(src, member) + + + # both src and dest need to be a loadlib + rc = link_loadlib_from_cobol(hosts, dest_name, cobol_pds) + assert rc == 0 + # make 
sure is executable + cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" + exec_res = hosts.all.shell( + cmd=cmd.format(member, dest) + ) + for result in exec_res.contacted.values(): + assert result.get("rc") == 0 + rc = link_loadlib_from_cobol(hosts, src_name, cobol_pds) + assert rc == 0 + + exec_res = hosts.all.shell( + cmd=cmd.format(member, src) + ) + for result in exec_res.contacted.values(): + assert result.get("rc") == 0 + + copy_res = hosts.all.zos_copy( + src="{0}({1})".format(src, member), + dest="{0}({1})".format(dest, "MEM1"), + remote_src=True) + + verify_copy = hosts.all.shell( + cmd="mls {0}".format(dest), + executable=SHELL_EXECUTABLE + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest, "MEM1") + + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + # number of members + assert len(stdout.splitlines()) == 2 + + finally: + hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=cobol_pds, state="absent") + + @pytest.mark.pdse def test_copy_multiple_data_set_members(ansible_zos_module): hosts = ansible_zos_module From 850f519d28f335d3b43683514966529a67cc0f3b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 17:19:48 -0600 Subject: [PATCH 093/413] Removed previous changelog --- changelogs/fragments/601-copy-loadlib-member.yml | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 changelogs/fragments/601-copy-loadlib-member.yml diff --git a/changelogs/fragments/601-copy-loadlib-member.yml b/changelogs/fragments/601-copy-loadlib-member.yml deleted file mode 100644 index 75b59e654..000000000 --- a/changelogs/fragments/601-copy-loadlib-member.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: -- zos_copy - 
Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. - (https://github.com/ansible-collections/ibm_zos_core/pull/640) From e7287920612aa391a0b435eb0664ba8651bcca53 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 17:38:50 -0600 Subject: [PATCH 094/413] Removed unused fragment --- changelogs/fragments/734-copy-loadlib-member.yml | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 changelogs/fragments/734-copy-loadlib-member.yml diff --git a/changelogs/fragments/734-copy-loadlib-member.yml b/changelogs/fragments/734-copy-loadlib-member.yml deleted file mode 100644 index ebbaad48c..000000000 --- a/changelogs/fragments/734-copy-loadlib-member.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: -- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. - (https://github.com/ansible-collections/ibm_zos_core/pull/734) From f02349c0416b564a29b6d678391a796f8bc8c051 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 17:45:06 -0600 Subject: [PATCH 095/413] Removed test case --- .../functional/modules/test_zos_copy_func.py | 41 ------------------- 1 file changed, 41 deletions(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cfe8e0ee9..85cd6dfbe 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1767,47 +1767,6 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts.all.zos_data_set(name=dest, state="absent") -@pytest.mark.pdse -def test_copy_pds_member_with_system_symbol(ansible_zos_module,): - """This test is for bug #543 in GitHub. 
In some versions of ZOAU, - datasets.listing can't handle system symbols in volume names and - therefore fails to get details from a dataset. - """ - hosts = ansible_zos_module - # The volume for this dataset should use a system symbol. - # This dataset and member should be available on any z/OS system. - src = "SYS1.SAMPLIB(IZUPRM00)" - dest = "USER.TEST.PDS.DEST" - - try: - hosts.all.zos_data_set( - name=dest, - state="present", - type="pdse", - replace=True - ) - - copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) - verify_copy = hosts.all.shell( - cmd="mls {0}".format(dest), - executable=SHELL_EXECUTABLE - ) - - for result in copy_res.contacted.values(): - assert result.get("msg") is None - assert result.get("changed") is True - assert result.get("dest") == dest - - for v_cp in verify_copy.contacted.values(): - assert v_cp.get("rc") == 0 - stdout = v_cp.get("stdout") - assert stdout is not None - assert len(stdout.splitlines()) == 1 - - finally: - hosts.all.zos_data_set(name=dest, state="absent") - - @pytest.mark.pdse def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): hosts = ansible_zos_module From 9e355bce1e2df299db6bbf6a39f4b3b0313183d3 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 13 Apr 2023 19:54:46 -0400 Subject: [PATCH 096/413] Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. 
Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> --- .../fragments/666-zos_tso_command_maxrc.yml | 4 + plugins/modules/zos_tso_command.py | 83 +++++++++++++++---- .../modules/test_zos_tso_command_func.py | 14 +++- 3 files changed, 85 insertions(+), 16 deletions(-) create mode 100644 changelogs/fragments/666-zos_tso_command_maxrc.yml diff --git a/changelogs/fragments/666-zos_tso_command_maxrc.yml b/changelogs/fragments/666-zos_tso_command_maxrc.yml new file mode 100644 index 000000000..c410c00b5 --- /dev/null +++ b/changelogs/fragments/666-zos_tso_command_maxrc.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_tso_command - was enhanced to accept `max_rc` as an option. This option + allows a non-zero return code to succeed as a valid return code. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/666) diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 7b2601d37..e3c4c6f12 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -20,20 +20,29 @@ DOCUMENTATION = r""" module: zos_tso_command version_added: '1.1.0' -author: "Xiao Yuan Ma (@bjmaxy)" short_description: Execute TSO commands description: - - Execute TSO commands on the target z/OS system with the provided options - and receive a structured response. + - Execute TSO commands on the target z/OS system with the provided options and receive a structured response. +author: + - "Xiao Yuan Ma (@bjmaxy)" + - "Rich Parker (@richp405)" options: commands: description: - One or more TSO commands to execute on the target z/OS system. - Accepts a single string or list of strings as input. + - If a list of strings is provided, processing will stop at the first failure, based on rc. required: true type: raw aliases: - command + max_rc: + description: + - Specifies the maximum return code allowed for a TSO command. + - If more than one TSO command is submitted, the I(max_rc) applies to all TSO commands. + default: 0 + required: false + type: int """ RETURN = r""" @@ -55,6 +64,13 @@ returned: always type: int sample: 0 + max_rc: + description: + - Specifies the maximum return code allowed for a TSO command. + - If more than one TSO command is submitted, the I(max_rc) applies to all TSO commands. + returned: always + type: int + sample: 0 content: description: The response resulting from the execution of the TSO command. 
@@ -89,6 +105,12 @@ commands: - LU TESTUSER +- name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) + zos_tso_command: + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 + """ from ansible.module_utils.basic import AnsibleModule @@ -100,7 +122,7 @@ ) -def run_tso_command(commands, module): +def run_tso_command(commands, module, max_rc): script = """/* REXX */ PARSE ARG cmd address tso @@ -113,11 +135,11 @@ def run_tso_command(commands, module): x = outtrap('OFF') exit rc """ - command_detail_json = copy_rexx_and_run_commands(script, commands, module) + command_detail_json = copy_rexx_and_run_commands(script, commands, module, max_rc) return command_detail_json -def copy_rexx_and_run_commands(script, commands, module): +def copy_rexx_and_run_commands(script, commands, module, max_rc): command_detail_json = [] delete_on_close = True tmp_file = NamedTemporaryFile(delete=delete_on_close) @@ -131,7 +153,17 @@ def copy_rexx_and_run_commands(script, commands, module): command_results["rc"] = rc command_results["content"] = stdout.split("\n") command_results["lines"] = len(command_results.get("content", [])) + command_results["stderr"] = stderr + + if rc <= max_rc: + command_results["failed"] = False + else: + command_results["failed"] = True + command_detail_json.append(command_results) + if command_results["failed"]: + break + return command_detail_json @@ -158,15 +190,18 @@ def list_or_str_type(contents, dependencies): def run_module(): module_args = dict( commands=dict(type="raw", required=True, aliases=["command"]), + max_rc=dict(type="int", required=False, default=0), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) result = dict( changed=False, + failed=True, ) arg_defs = dict( commands=dict(type=list_or_str_type, required=True, aliases=["command"]), + max_rc=dict(type="int", required=False, default=0), ) try: parser = BetterArgParser(arg_defs) @@ -175,19 +210,37 @@ def 
run_module(): module.fail_json(msg=repr(e), **result) commands = parsed_args.get("commands") + max_rc = parsed_args.get("max_rc") + if max_rc is None: + max_rc = 0 try: - result["output"] = run_tso_command(commands, module) + result["output"] = run_tso_command(commands, module, max_rc) + result["max_rc"] = max_rc + errors_found = False + result_list = [] + for cmd in result.get("output"): - if cmd.get("rc") != 0: - module.fail_json( - msg='The TSO command "' - + cmd.get("command", "") - + '" execution failed.', - **result - ) + tmp_string = 'Command "' + cmd.get("command", "") + '" execution' + if cmd.get("rc") > max_rc: + errors_found = True + if max_rc > 0: + result_list.append(tmp_string + "failed. RC was {0}; max_rc was {1}".format(cmd.get("rc"), max_rc)) + else: + result_list.append(tmp_string + "failed. RC was {0}.".format(cmd.get("rc"))) + else: + result_list.append(tmp_string + "succeeded. RC was {0}.".format(cmd.get("rc"))) + + if errors_found: + result_string = "\n".join(result_list) + + module.fail_json( + msg="Some ({0}) command(s) failed:\n{1}".format(errors_found, result_string), + **result + ) result["changed"] = True + result["failed"] = False module.exit_json(**result) except Exception as e: diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index eeddd9ef3..dbdf888f4 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -160,3 +160,15 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True + + +# The positive test +# The command that kicks off rc>0 which is allowed +def test_zos_tso_command_maxrc(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_tso_command(commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"],max_rc=4) + for result in results.contacted.values(): + for item in result.get("output"): + print( item ) + assert item.get("rc") < 5 + assert result.get("changed") is True From 9e680031948eb2746207df2324ec72bf518b7a66 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 13 Apr 2023 23:31:56 -0700 Subject: [PATCH 097/413] added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/734-copy-loadlib-member-test-case.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelogs/fragments/734-copy-loadlib-member-test-case.yml diff --git a/changelogs/fragments/734-copy-loadlib-member-test-case.yml b/changelogs/fragments/734-copy-loadlib-member-test-case.yml new file mode 100644 index 000000000..4482c61da --- /dev/null +++ b/changelogs/fragments/734-copy-loadlib-member-test-case.yml @@ -0,0 +1,4 @@ +trivial: +- zos_copy - Adds a test cases to ensure copying from a PDS/E member containing + a loadlib to another PDS/E member loadlib member for issue 601. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/734) \ No newline at end of file From 192312e9756c24ce8b6e26e8b615258a94e0a3f0 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 17 Apr 2023 09:24:08 -0700 Subject: [PATCH 098/413] zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../740-zos_copy-volume-symbol-test.yml | 5 +++ .../functional/modules/test_zos_copy_func.py | 45 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 changelogs/fragments/740-zos_copy-volume-symbol-test.yml diff --git a/changelogs/fragments/740-zos_copy-volume-symbol-test.yml b/changelogs/fragments/740-zos_copy-volume-symbol-test.yml new file mode 100644 index 000000000..a30a50869 --- /dev/null +++ b/changelogs/fragments/740-zos_copy-volume-symbol-test.yml @@ -0,0 +1,5 @@ +trivial: +- zos_copy - prior, there was no test case for symbols on a volume. + This change adds a test case to test a volume which has in it symbols, + issue 738. + (https://github.com/ansible-collections/ibm_zos_core/pull/740) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 85cd6dfbe..dbc76eeff 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1867,6 +1867,51 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): hosts.all.zos_data_set(name=cobol_pds, state="absent") +@pytest.mark.pdse +def test_copy_pds_member_with_system_symbol(ansible_zos_module,): + """This test is for bug #543 in GitHub. 
In some versions of ZOAU, + datasets.listing can't handle system symbols in volume names and + therefore fails to get details from a dataset. + + Note: `listcat ent('SYS1.SAMPLIB') all` will display 'volser = ******' + and `D SYMBOLS` will show you that `&SYSR2. = "RES80A"` where + the symbols for this volume correspond to volume `RES80A` + """ + hosts = ansible_zos_module + # The volume for this dataset should use a system symbol. + # This dataset and member should be available on any z/OS system. + src = "SYS1.SAMPLIB(IZUPRM00)" + dest = "USER.TEST.PDS.DEST" + + try: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + replace=True + ) + + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) + verify_copy = hosts.all.shell( + cmd="mls {0}".format(dest), + executable=SHELL_EXECUTABLE + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == dest + + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + assert len(stdout.splitlines()) == 1 + + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + @pytest.mark.pdse def test_copy_multiple_data_set_members(ansible_zos_module): hosts = ansible_zos_module From ee42b6a66017900a891ab57be14de52a637cda87 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Mon, 17 Apr 2023 16:43:56 -0700 Subject: [PATCH 099/413] Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. 
* Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to 
module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch 
target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around 
authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> --- .../654-new-module-zos_volume_init.yml | 2 + plugins/module_utils/ickdsf.py | 151 +++++++ plugins/module_utils/zos_mvs_raw.py | 9 +- plugins/modules/zos_volume_init.py | 272 +++++++++++++ .../functional/modules/test_zos_fetch_func.py | 30 +- .../modules/test_zos_volume_init_func.py | 385 ++++++++++++++++++ tests/sanity/ignore-2.10.txt | 5 +- tests/sanity/ignore-2.11.txt | 4 +- tests/sanity/ignore-2.12.txt | 3 + tests/sanity/ignore-2.13.txt | 1 + tests/sanity/ignore-2.14.txt | 1 + tests/sanity/ignore-2.15.txt | 1 + tests/sanity/ignore-2.9.txt | 5 +- 13 files changed, 
861 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/654-new-module-zos_volume_init.yml create mode 100644 plugins/module_utils/ickdsf.py create mode 100644 plugins/modules/zos_volume_init.py create mode 100644 tests/functional/modules/test_zos_volume_init_func.py diff --git a/changelogs/fragments/654-new-module-zos_volume_init.yml b/changelogs/fragments/654-new-module-zos_volume_init.yml new file mode 100644 index 000000000..41808d718 --- /dev/null +++ b/changelogs/fragments/654-new-module-zos_volume_init.yml @@ -0,0 +1,2 @@ +major_changes: +- zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. (https://github.com/ansible-collections/ibm_zos_core/pull/654) \ No newline at end of file diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py new file mode 100644 index 000000000..67ddd3d9d --- /dev/null +++ b/plugins/module_utils/ickdsf.py @@ -0,0 +1,151 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +# from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( +# MissingZOAUImport, +# MissingImport, +# ) + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd # pylint: disable=import-error +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import ( # pylint: disable=import-error + DDStatement, + StdoutDefinition, + StdinDefinition, +) + + +def get_init_command(module, result, args): + + # Get parameters from playbooks + address = args.get('address') + verify_volid = args.get('verify_volid') + verify_offline = args.get('verify_offline') + volid = args.get('volid') + vtoc_size = args.get('vtoc_size') + index = args.get('index') + verify_volume_empty = args.get('verify_volume_empty') + sms_managed = args.get('sms_managed') + + # Let AnsibleModule param parsing handle this check. + # validate parameters + # if address is None: + # msg = 'Volume address must be defined' + # # raise Exception(msg) + # module.fail_json(msg) # TODO - fail with result -- do i want an init class so i can self.fail_json? + + # let ICKDSF handle this check. expect RC=12 + # try: + # int(address, 16) + # except ValueError: + # result['failed'] = True + # msg = 'address must be 3 or 4 64-bit hexadecimal digits' + # # raise Exception(msg) + # module.fail_json(msg, **result) # TODO - fail with result -- do i want an init class so i can self.fail_json? 
+ + # convert playbook args to JCL parameters + cmd_args = { + 'address': 'unit({0})'.format(address) + } + + if vtoc_size: + cmd_args['vtoc_size'] = 'vtoc(0, 1, {0})'.format(vtoc_size) + else: + cmd_args['vtoc_size'] = '' + if volid: + cmd_args['volid'] = 'volid({0})'.format(volid) + else: + cmd_args['volid'] = '' + if not verify_volid: + cmd_args['verify_volid'] = 'noverify' + else: + cmd_args['verify_volid'] = 'verify({0})'.format(verify_volid) + if verify_offline: + cmd_args['verify_offline'] = 'verifyoffline' + else: + cmd_args['verify_offline'] = 'noverifyoffline' + if verify_volume_empty: + cmd_args['verify_volume_empty'] = 'nods' + else: + cmd_args['verify_volume_empty'] = 'ds' + if index: + cmd_args['index'] = '' + else: + cmd_args['index'] = 'noindex' + if sms_managed: + cmd_args['sms_managed'] = 'storagegroup' + else: + cmd_args['sms_managed'] = '' + + # Format into JCL strings for zos_mvs_raw + cmd = [ + ' init {0} {1} {2} {3} - '.format( + cmd_args['address'], + cmd_args['verify_volid'], + cmd_args['verify_offline'], + cmd_args['volid']), + ' {0} {1} {2} {3}'.format( + cmd_args['vtoc_size'], + cmd_args['sms_managed'], + cmd_args['verify_volume_empty'], + cmd_args['index'])] + + return cmd + + +def init(module, result, parsed_args): + # Convert args parsed from module to ickdsf INIT command + cmd = get_init_command(module, result, parsed_args) + + # TODO - add error handling here and in get_init_command() for "bad" cmd + + # define/build DDs to pass into MVS Command + + if parsed_args.get('tmp_hlq'): + sysInDDStatement = DDStatement("SYSIN", StdinDefinition(cmd, tmphlq=parsed_args.get('tmp_hlq'))) + else: + sysInDDStatement = DDStatement("SYSIN", StdinDefinition(cmd)) + + # tmphlq is not currently captured in the construction of the StdoutDefinition DD. + # tmphlq is handled in the mvscmd.execute_authorized call in this case. 
+ sysprintDDStatement = DDStatement("SYSPRINT", StdoutDefinition()) + + dds = [] + dds.append(sysprintDDStatement) + dds.append(sysInDDStatement) + + # invoke MVS Command + if parsed_args.get('tmp_hlq'): + response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE', tmp_hlq=parsed_args.get('tmp_hlq')) + # uncomment the following line to see MVSCmd verbose output in stderr. + # response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE', verbose=True, tmp_hlq=parsed_args.get('tmp_hlq')) + else: + response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE') + # uncomment the following line to see MVSCmd verbose output in stderr. + # response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE', verbose=True) + + rc = response.rc + + result['rc'] = rc + result['content'] = response.stdout.strip().split("\n") + if response.stderr: + result['stderr'] = response.stderr + + if rc != 0: + result['failed'] = True + msg = "Non-zero return code. See 'content' for details." + module.fail_json(msg=msg, **result) + else: + result['changed'] = True + + return dict(result) diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index f8a91ac0d..7c2badf84 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -45,23 +45,26 @@ def execute(pgm, dds, parm="", debug=False, verbose=False): return MVSCmdResponse(rc, out, err) @staticmethod - def execute_authorized(pgm, dds, parm="", debug=False, verbose=False): + def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an authorized MVS command. Args: pgm (str): The name of the program to execute. 
dds (list[DDStatement]): A list of DDStatement objects. parm (str, optional): Argument string if required by the program. Defaults to "". + tmp_hlq (str): The name of the temporary high level qualifier to use for temp data sets. Returns: MVSCmdResponse: The response of the command. """ module = AnsibleModuleHelper(argument_spec={}) - command = "mvscmdauth {0} {1} {2} ".format( + command = "mvscmdauth {0} {1} {2} {3} ".format( "-d" if debug else "", "-v" if verbose else "", + "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) + rc, out, err = module.run_command(command) return MVSCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py new file mode 100644 index 000000000..03854a80f --- /dev/null +++ b/plugins/modules/zos_volume_init.py @@ -0,0 +1,272 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type + +DOCUMENTATION = r""" +module: zos_volume_init +short_description: Initialize volumes or minidisks. +description: + - Initialize a volume or minidisk on z/OS. + - I(zos_volume_init) will create the volume label and entry into the volume + table of contents (VTOC). + - Volumes are used for storing data and executable programs. 
+ - A minidisk is a portion of a disk that is linked to your virtual machine. + - A VTOC lists the data sets that reside on a volume, their location, size, and + other attributes. + - I(zos_volume_init) uses the ICKDSF command INIT to initialize a volume. In some + cases the command could be protected by facility class `STGADMIN.ICK.INIT`. + Protection occurs when the class is active, and the class profile is defined. + Ensure the user executing the Ansible task is permitted to execute + ICKDSF command INIT, otherwise, any user can use the command. + - ICKDSF is an Authorized Program Facility (APF) program on z/OS, + I(zos_volume_init) will run in authorized mode but if the program ICKDSF is + not APF authorized, the task will end. + - Note that defaults set on target z/OS systems may override ICKDSF parameters. + - It is recommended that data on the volume is backed up as the I(zos_volume_init) + module will not perform any backups. You can use the + L(zos_backup_restore,./zos_backup_restore.html) module to backup a volume. + + +version_added: 1.6.0 +author: + - "Austen Stewart (@stewartad)" + - "Almigdad Suliman (@Almigdad-Suliman)" + - "Nicholas Teves (@nktvs)" + - "Nuoya Xie (@nxie13)" + - "Trevor Glassey (@tkglassey)" + - "Tyler Edwards (@TLEdwards-Git)" + - "Ketan Kelkar (@ketankelkar)" + +options: + address: + description: + - I(address) is a 3 or 4 digit hexadecimal number that specifies the + address of the volume or minidisk. + - I(address) can be the number assigned to the device (device number) + when it is installed or the virtual address. + required: true + type: str + verify_volid: + description: + - Verify that the volume serial matches what is on the existing volume or minidisk. + - I(verify_volid) must be 1 to 6 alphanumeric characters or "*NONE*". + - To verify that a volume serial number does not exist, use + I(verify_volid="*NONE*"). 
+ - If I(verify_volid) is specified and the volume serial number does not + match that found on the volume or minidisk, initialization does not complete. + - If I(verify_volid="*NONE*") is specified and a volume serial is found on + the volume or minidisk, initialization does not complete. + - Note, this option is B(not) a boolean, leave it blank to skip the verification. + required: false + type: str + verify_offline: + description: + - Verify that the device is not online to any other systems, initialization + does not complete. + type: bool + required: false + default: true + volid: + description: + - The volume serial number used to initialize a volume or minidisk. + - Expects 1-6 alphanumeric, national ($,#,@) or special characters. + - A I(volid) with less than 6 characters will be padded with spaces. + - A I(volid) can also be referred to as volser or volume serial number. + - When I(volid) is not specified for a previously initialized volume or + minidisk, the volume serial number will remain unchanged. + required: false + type: str + vtoc_size: + description: + - The number of tracks to initialize the volume table of contents (VTOC) with. + - The VTOC will be placed in cylinder 0 head 1. + - If no tracks are specified it will default to the number of tracks in a + cylinder minus 1. Tracks in a cylinder vary based on direct-access storage + device (DASD) models, for 3390 a cylinder is 15 tracks. + required: false + type: int + index: + description: + - Create a volume table of contents (VTOC) index. + - The VTOC index enhances the performance of VTOC access. + - When set to I(false), no index will be created. + required: false + type: bool + default: true + sms_managed: + description: + - Specifies that the volume be managed by Storage Management System (SMS). + - If I(sms_managed) is I(true) then I(index) must also be I(true). 
+ type: bool + required: false + default: true + verify_volume_empty: + description: + - Verify that no data sets other than the volume table of contents (VTOC) + index or the VSAM Volume Data Set(VVDS) exist on the target volume. + required: false + type: bool + default: true + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary and backup + datasets. + - The default HLQ is the Ansible user used to execute the module and if + that is not available, then the value C(TMPHLQ) is used. + required: false + type: str +seealso: +- module: zos_backup_restore +""" +EXAMPLES = r""" +- name: Initialize target volume with all default options. Target volume address is '1234', set volume name to 'DEMO01'. + Target volume is checked to ensure it is offline and contains no data sets. Volume is SMS managed, has an index + and VTOC size defined by the system. + zos_volume_init: + address: "1234" + volid: "DEMO01" + +- name: Initialize target volume with all default options and additionally check the existing volid + matches the given value 'DEMO02' before re-initializing the volume and renaming it to 'DEMO01'. + zos_volume_init: + address: "1234" + volid: "DEMO01" + verify_volid: "DEMO02" + +- name: Initialize non-SMS managed target volume with all the default options. + zos_volume_init: + address: "1234" + volid: "DEMO01" + sms_managed: no + +- name: Initialize non-SMS managed target volume with all the default options and + override the default high level qualifier (HLQ). + zos_volume_init: + address: 1234 + volid: DEMO01 + sms_managed: no + tmp_hlq: TESTUSR + +- name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as + the existing volume serial is 'ine8d8' and there are no pre-existing data sets on the target. The check to see + if volume is online before initialization is skipped. 
+ zos_volume_init: + address: e8d8 + vtoc_size: 30 + index: yes + sms_managed: yes + volid: ine8d8 + verify_volid: ine8d8 + verify_volume_empty: yes + verify_offline: no + +- name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' + using Ansible loops. + zos_volume_init: + address: "090{{ item }}" + volid: "DEMO0{{ item }}" + loop: "{{ range(1, 4, 1) }}" +""" +RETURN = r""" +msg: + description: Failure message returned by module. + returned: failure + type: str + sample: "'Index' cannot be False for SMS managed volumes." +rc: + description: + - Return code from ICKDSF init command. + type: dict + returned: when ICKDSF program is run. +content: + description: + - Raw output from ICKDSF. + returned: when ICKDSF program is run. + type: list + elements: str + sample: + [ + "1ICKDSF - MVS/ESA DEVICE SUPPORT FACILITIES 17.0 TIME: 18:32:22 01/17/23 PAGE 1", + "0 ", + "0 INIT UNIT(0903) NOVERIFY NOVERIFYOFFLINE VOLID(KET678) -", + "0 NODS NOINDEX", + "-ICK00700I DEVICE INFORMATION FOR 0903 IS CURRENTLY AS FOLLOWS:", + "- PHYSICAL DEVICE = 3390", + "- STORAGE CONTROLLER = 2107", + "- STORAGE CONTROL DESCRIPTOR = E8", + "- DEVICE DESCRIPTOR = 0C", + "- ADDITIONAL DEVICE INFORMATION = 4A00003C", + "- TRKS/CYL = 15, # PRIMARY CYLS = 100", + "0ICK04000I DEVICE IS IN SIMPLEX STATE", + "0ICK00703I DEVICE IS OPERATED AS A MINIDISK", + " ICK00091I 0903 NED=002107.900.IBM.75.0000000BBA01", + "-ICK03091I EXISTING VOLUME SERIAL READ = KET987", + "-ICK03096I EXISTING VTOC IS LOCATED AT CCHH=X'0000 0001' AND IS 14 TRACKS.", + "0ICK01314I VTOC IS LOCATED AT CCHH=X'0000 0001' AND IS 14 TRACKS.", + "-ICK00001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 0", + "0 18:32:22 01/17/23", + "0 ", + "-ICK00002I ICKDSF PROCESSING COMPLETE. 
MAXIMUM CONDITION CODE WAS 0", + ] +""" + +from ansible.module_utils.basic import AnsibleModule + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ickdsf # pylint: disable=import-error + + +def run_module(): + + module_args = dict( + address=dict(type="str", required=True), + verify_volid=dict(type="str", required=False), + verify_offline=dict(type="bool", required=False, default=True), + volid=dict(type="str", required=False), + vtoc_size=dict(type="int", required=False), + index=dict(type="bool", required=False, default=True), + sms_managed=dict(type="bool", required=False, default=True), + verify_volume_empty=dict(type="bool", required=False, default=True), + tmp_hlq=dict(type='str', required=False, default=None), + ) + + result = dict( + changed=False, + ) + + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=False + ) + + # sms managed and index are defined by ickdsf init as mutually exclusive. + if module.params['sms_managed'] and not module.params['index']: + module.fail_json(msg="'Index' cannot be False for SMS managed volumes.", **result) + + if module.check_mode: + module.exit_json(**result) + + result.update(ickdsf.init(module, result, module.params)) + + module.exit_json(**result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index f4c514265..bc1154de2 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021 +# Copyright (c) IBM Corporation 2020, 2021, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -31,8 +31,9 @@ DUMMY DATA == LINE 03 == """ + TEST_PS = "IMSTESTL.IMS01.DDCHKPT" -TEST_PS_VB = "IMSTESTL.IMS01.SPOOL1" +TEST_PS_VB = "USER.PRIV.PSVB" TEST_PDS = "IMSTESTL.COMNUC" TEST_PDS_MEMBER = "IMSTESTL.COMNUC(ATRQUERY)" TEST_VSAM = "FETCH.TEST.VS" @@ -78,6 +79,29 @@ /* """ +def create_and_populate_test_ps_vb(ansible_zos_module): + params=dict( + name=TEST_PS_VB, + type='SEQ', + record_format='VB', + record_length='3180', + block_size='3190' + ) + ansible_zos_module.all.zos_data_set(**params) + params = dict( + src=TEST_PS_VB, + block=TEST_DATA + ) + ansible_zos_module.all.zos_blockinfile(**params) + + +def delete_test_ps_vb(ansible_zos_module): + params=dict( + name=TEST_PS_VB, + state='absent' + ) + ansible_zos_module.all.zos_data_set(**params) + def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module @@ -154,6 +178,7 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): def test_fetch_sequential_data_set_variable_block(ansible_zos_module): hosts = ansible_zos_module + create_and_populate_test_ps_vb(ansible_zos_module) params = dict(src=TEST_PS_VB, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS_VB try: @@ -167,6 +192,7 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) + delete_test_ps_vb(ansible_zos_module) def test_fetch_partitioned_data_set(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_volume_init_func.py b/tests/functional/modules/test_zos_volume_init_func.py new file mode 100644 index 000000000..39952105c --- /dev/null +++ b/tests/functional/modules/test_zos_volume_init_func.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +import pytest + +__metaclass__ = type + +# TEST_VOL_ADDR = '0903' +# TEST_VOL_SER = 'KET999' +TEST_VOL_ADDR = '01A2' +TEST_VOL_SER = 'USER02' + +INDEX_CREATION_SUCCESS_MSG = 'VTOC INDEX CREATION SUCCESSFUL' +VTOC_LOC_MSG = "ICK01314I VTOC IS LOCATED AT CCHH=X'0000 0001' AND IS {:4d} TRACKS." + + +# Guard Rail to prevent unintentional initialization of targeted volume. +# If this test fails, either reset target volume serial to match +# verify_volid below or change value to match current volume serial on +# target. + +def test_guard_rail_and_setup(ansible_zos_module): + hosts = ansible_zos_module + + # remove all data sets from target volume. 
Expected to be the following 3 + hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL1", state="absent") + hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL2", state="absent") + hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL3", state="absent") + + params = dict( + address=TEST_VOL_ADDR, + verify_offline=False, + volid=TEST_VOL_SER, + verify_volid='USER02' + ) + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init( + address=params['address'], + verify_offline=params['verify_offline'], + volid=params['volid'], + verify_volid=params['verify_volid'] + ) + + for result in results.contacted.values(): + # assert result.get('changed') is True + assert result['rc'] == 0 + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +@pytest.mark.parametrize( + "params", [ + # min params test with index : true + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'index' : True + }), + # min params test with index : false + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'index' : False, + 'sms_managed' : False # default is True, which cannot be with no index. 
+ }), + ] +) +def test_index_param(ansible_zos_module, params): + hosts = ansible_zos_module + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + content_str = ''.join(result.get("content")) + if params['index']: + assert INDEX_CREATION_SUCCESS_MSG in content_str + else: + assert INDEX_CREATION_SUCCESS_MSG not in content_str + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +# check that correct volume_addr is assigned to correct volid +def test_volid_address_assigned_correctly(ansible_zos_module): + hosts = ansible_zos_module + + params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + } + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + + # The display command issued queries a volume called $TEST_VOL_SER. The + # expected return values are: 'IEE455I UNIT STATUS NO DEVICES WITH REQUESTED + # ATTRIBUTES' or a line with several attributes including unit address + # example output: + # 'UNIT TYPE STATUS VOLSER VOLSTATE SS' + # '0903 3390 O DEMO01 PRIV/RSDNT 0' + # or: + # 'IEE455I UNIT STATUS NO DEVICES WITH REQUESTED ATTRIBUTES' + # (expected value $TEST_VOL_ADDR) and volume serial + # (expected value $TEST_VOL_SER). If those two match, then the 'volid' + # parameter is correctly assigned to the 'address' parameter. 
+ + # Display command to print device status, volser and addr should correspond + display_cmd_output = list(hosts.all.zos_operator(cmd=f"D U,VOL={TEST_VOL_SER}").contacted.values())[0] + + # zos_operator output contains the command as well, only the last line of + # the output is relevant for the needs of this test case. + display_cmd_output = display_cmd_output.get('content')[-1] + + assert TEST_VOL_SER in display_cmd_output + +def test_no_index_sms_managed_mutually_exclusive(ansible_zos_module): + hosts = ansible_zos_module + + params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'index' : False, + 'sms_managed' : True + } + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + for result in results.contacted.values(): + assert result.get("changed") is False + assert "'Index' cannot be False" in result.get("msg") + +def test_vtoc_size_parm(ansible_zos_module): + hosts = ansible_zos_module + + params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'vtoc_size' : 8 + # 'vtoc_size' : 11 # test to test that this test handles 2 digit vtoc_index + } + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + content_str = ''.join(result.get("content")) + assert VTOC_LOC_MSG.format(params.get('vtoc_size')) in content_str + +@pytest.mark.parametrize( + "params", [ + # min params test; also sets up with expected attrs (eg existing volid) + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + }), + # verify_volid 
check - volid is known b/c previous test set it up. + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volid' : TEST_VOL_SER + }), + # verify_volume_empty check - no data sets on vol is known b/c previous test set it up. + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volume_empty' : True + }), + ] +) + + +def test_good_param_values(ansible_zos_module, params): + hosts = ansible_zos_module + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +@pytest.mark.parametrize( + "params,expected_rc", [ + # address not hexadecimal + ({ + 'address': 'XYZ', + 'verify_offline': False, + 'volid': TEST_VOL_SER + }, 12), + # address length too short + ({ + 'address': '01', + 'verify_offline': False, + 'volid': TEST_VOL_SER + }, 12), + # address specified is not accessible to current + ({ + 'address': '0000', + 'verify_offline': False, + 'volid': TEST_VOL_SER + }, 12), + # negative value for vtoc_size + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'vtoc_size': -10 + }, 12), + # note - "'vtoc_size': 0" gets treated as vtoc_size wasn't defined and invokes default behavior. 
+ # volid check - incorrect existing volid + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volid': '000000' + }, 12), + # volid value too long + ({ + 'address': 'ABCDEFGHIJK', + 'verify_offline': False, + 'volid': TEST_VOL_SER, + }, 12), + # ({}, 0) + + ] +) + +def test_bad_param_values(ansible_zos_module, params, expected_rc): + hosts = ansible_zos_module + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get('failed') is True + assert result.get('rc') == expected_rc + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +# Note - volume needs to be sms managed for zos_data_set to work. Possible +# points of failure are: +# unable to init volume first time around +# unable to allocate data set +# unable to bring vol back online to delete data set +# If there is a data set remaining on the volume, that would interfere +# with other tests! 
+ +def test_no_existing_data_sets_check(ansible_zos_module): + hosts = ansible_zos_module + + setup_params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'sms_managed': False # need non-sms managed to add data set on ECs + } + test_params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volume_empty': True, + } + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + try: + # set up/initialize volume properly so a data set can be added + hosts.all.zos_volume_init(**setup_params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + # allocate data set to volume + hosts.all.zos_data_set(name="USER.PRIVATE.TESTDS", type='pds', volumes=TEST_VOL_SER) + + # take volume back offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + # run vol_init against vol with data set on it. + results = hosts.all.zos_volume_init(**test_params) + + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get('failed') is True + assert result.get('rc') == 12 + + # clean up just in case of failures, volume needs to be reset for other + # tests. Not sure what to do for DatasetDeleteError + finally: + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + # remove data set + hosts.all.zos_data_set(name="USER.PRIVATE.TESTDS", state='absent') + + +# Note - technically verify_offline is not REQUIRED but it defaults to True +# and the volumes on the EC systems do not seem to go fully offline. +# Therefore, while testing against the EC machines, the verify_offline +# check needs to be skipped in order for ickdsf to be invoked. 
+ +def test_minimal_params(ansible_zos_module): + hosts = ansible_zos_module + + params = dict( + address=TEST_VOL_ADDR, + verify_offline=False, + volid=TEST_VOL_SER + ) + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init( + address=params['address'], + verify_offline=params['verify_offline'], + volid=params['volid'] + ) + + for result in results.contacted.values(): + assert result.get('changed') is True + assert result['rc'] == 0 + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 01b86286b..74db3a282 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -79,4 +79,7 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 874e6d4de..420528c74 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -59,7 +59,6 @@ plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 
plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_lineinfile.py compile-2.7!skip # Python 2.7 f string is not supported plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_mount.py import-2.6!skip # Python 2.6 is unsupported @@ -81,3 +80,6 @@ plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index cc80ef80c..420528c74 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -80,3 +80,6 @@ plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index 
67be075f7..8b4540038 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -32,3 +32,4 @@ plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 67be075f7..8b4540038 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -32,3 +32,4 @@ plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 67be075f7..8b4540038 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -32,3 +32,4 @@ plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py 
validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index d9b87031d..4a6c8a778 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -78,4 +78,7 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported From a856d18e9797f1d2eacf4ca7ab350b9f4cc3b390 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 21 Apr 2023 09:55:54 -0700 Subject: [PATCH 100/413] Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization --- .../fragments/743-zos_copy-encoding-bugs.yml | 9 + plugins/modules/zos_copy.py | 186 +++++++++++++----- .../functional/modules/test_zos_copy_func.py | 100 +++++++++- 3 files changed, 248 insertions(+), 47 deletions(-) create mode 100644 changelogs/fragments/743-zos_copy-encoding-bugs.yml diff --git a/changelogs/fragments/743-zos_copy-encoding-bugs.yml b/changelogs/fragments/743-zos_copy-encoding-bugs.yml new file mode 100644 index 000000000..1b58ddabe --- /dev/null +++ b/changelogs/fragments/743-zos_copy-encoding-bugs.yml @@ -0,0 +1,9 @@ +bugfixes: +- zos_copy - Fixes a bug where files not encoded in IBM-1047 + would trigger an error while computing the record length + for a new destination dataset. Issue 664. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_copy - Fixes a bug where the code for fixing an issue with + newlines in files (issue 599) would use the wrong encoding + for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 2fe9ffd4c..739c0d8d0 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -926,15 +926,20 @@ def file_has_crlf_endings(self, src): {bool} -- True if the file uses CRLF endings, False if it uses LF ones. """ + # Python has to read the file in binary mode to not mask CRLF + # endings or enable universal newlines. If we used encoding="cp037", + # we would get '\n' as the line ending even when the file uses '\r\n'. with open(src, "rb") as src_file: - # readline() will read until it finds a \n. 
- content = src_file.readline() + # Reading the file in 1024-byte chunks. + content = src_file.read(1024) - # In EBCDIC, \r\n are bytes 0d and 15, respectively. - if content.endswith(b'\x0d\x15'): - return True - else: - return False + while content: + # In EBCDIC, \r\n are bytes 0d and 15, respectively. + if b'\x0d\x15' in content: + return True + content = src_file.read(1024) + + return False def create_temp_with_lf_endings(self, src): """Creates a temporary file with the same content as src but without @@ -955,10 +960,11 @@ def create_temp_with_lf_endings(self, src): with open(converted_src, "wb") as converted_file: with open(src, "rb") as src_file: - current_line = src_file.read() - converted_file.write(current_line.replace(b'\x0d', b'')) + chunk = src_file.read(1024) + # In IBM-037, \r is the byte 0d. + converted_file.write(chunk.replace(b'\x0d', b'')) - self._tag_file_encoding(converted_src, encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET) + self._tag_file_encoding(converted_src, "IBM-037") return converted_src except Exception as err: @@ -1314,6 +1320,7 @@ def copy_to_pdse( src_ds_type, src_member=None, dest_member=None, + encoding=None, ): """Copy source to a PDS/PDSE or PDS/PDSE member. @@ -1323,12 +1330,13 @@ def copy_to_pdse( Arguments: src {str} -- Path to USS file/directory or data set name. temp_path {str} -- Path to the location where the control node - transferred data to - conv_path {str} -- Path to the converted source file/directory - dest {str} -- Name of destination data set - src_ds_type {str} -- The type of source + transferred data to. + conv_path {str} -- Path to the converted source file/directory. + dest {str} -- Name of destination data set. + src_ds_type {str} -- The type of source. src_member {bool, optional} -- Member of the source data set to copy. - dest_member {str, optional} -- Name of destination member in data set + dest_member {str, optional} -- Name of destination member in data set. 
+ encoding {dict, optional} -- Dictionary with encoding options. """ new_src = conv_path or temp_path or src src_members = [] @@ -1341,7 +1349,11 @@ def copy_to_pdse( else: path, dirs, files = next(os.walk(new_src)) - src_members = [os.path.normpath("{0}/{1}".format(path, file)) for file in files] + src_members = [ + os.path.normpath("{0}/{1}".format(path, file)) if self.is_binary + else normalize_line_endings("{0}/{1}".format(path, file), encoding) + for file in files + ] dest_members = [ dest_member if dest_member else data_set.DataSet.get_member_name_from_file(file) @@ -1453,7 +1465,7 @@ def get_file_record_length(file): """ max_line_length = 0 - with open(file, "r") as src_file: + with open(file, "r", encoding="utf-8") as src_file: current_line = src_file.readline() while current_line: @@ -2078,6 +2090,53 @@ def allocate_destination_data_set( return True +def normalize_line_endings(src, encoding=None): + """ + Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes + its line endings to LF. + + Arguments: + src (str) -- Path of a USS file. + encoding (dict, optional) -- Encoding options for the module. + + Returns: + str -- Path to the normalized file. + """ + # Before copying into a destination dataset, we'll make sure that + # the source file doesn't contain any carriage returns that would + # result in empty records in the destination. + # Due to the differences between encodings, we'll normalize to IBM-037 + # before checking the EOL sequence. + enc_utils = encode.EncodeUtils() + src_tag = enc_utils.uss_file_tag(src) + copy_handler = CopyHandler(AnsibleModuleHelper(dict())) + + if src_tag == "untagged": + # This should only be true when src is a remote file and no encoding + # was specified by the user. 
+ if not encoding: + encoding = {"from": encode.Defaults.get_default_system_charset()} + src_tag = encoding["from"] + + if src_tag != "IBM-037": + fd, converted_src = tempfile.mkstemp() + os.close(fd) + + enc_utils.uss_convert_encoding( + src, + converted_src, + src_tag, + "IBM-037" + ) + copy_handler._tag_file_encoding(converted_src, "IBM-037") + src = converted_src + + if copy_handler.file_has_crlf_endings(src): + src = copy_handler.create_temp_with_lf_endings(src) + + return src + + def run_module(module, arg_def): # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError @@ -2160,6 +2219,7 @@ def run_module(module, arg_def): # and destination datasets, if needed. # ******************************************************************** dest_member_exists = False + converted_src = None try: # If temp_path, the plugin has copied a file from the controller to USS. if temp_path or "/" in src: @@ -2167,6 +2227,38 @@ def run_module(module, arg_def): if remote_src and os.path.isdir(src): is_src_dir = True + + # When the destination is a dataset, we'll normalize the source + # file to UTF-8 for the record length computation as Python + # generally uses UTF-8 as the default encoding. + if not is_uss: + new_src = temp_path or src + new_src = os.path.normpath(new_src) + # Normalizing encoding when src is a USS file (only). + encode_utils = encode.EncodeUtils() + src_tag = encode_utils.uss_file_tag(new_src) + # Normalizing to UTF-8. + if not is_src_dir and src_tag != "UTF-8": + # If untagged, assuming the encoding/tag is the system's default. + if src_tag == "untagged" or src_tag is None: + if encoding: + src_tag = encoding["from"] + else: + src_tag = encode.Defaults.get_default_system_charset() + + # Converting the original src to a temporary one in UTF-8. 
+ fd, converted_src = tempfile.mkstemp() + os.close(fd) + encode_utils.uss_convert_encoding( + new_src, + converted_src, + src_tag, + "UTF-8" + ) + + # Creating the handler just for tagging, we're not copying yet! + copy_handler = CopyHandler(module, is_binary=is_binary) + copy_handler._tag_file_encoding(converted_src, "UTF-8") else: if data_set.DataSet.data_set_exists(src_name): if src_member and not data_set.DataSet.data_set_member_exists(src): @@ -2344,6 +2436,17 @@ def run_module(module, arg_def): emergency_backup = data_set.DataSet.temp_name() data_set.DataSet.allocate_model_data_set(emergency_backup, dest_name) + # Here we'll use the normalized source file by shadowing the + # original one. This change applies only to the + # allocate_destination_data_set call. + if converted_src: + if remote_src: + original_src = src + src = converted_src + else: + original_temp = temp_path + temp_path = converted_src + try: if not is_uss: res_args["changed"] = allocate_destination_data_set( @@ -2360,11 +2463,22 @@ def run_module(module, arg_def): if dest_exists and not force: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) erase_backup(emergency_backup, dest_ds_type) + if converted_src: + if remote_src: + src = original_src + else: + temp_path = original_temp module.fail_json( msg="Unable to allocate destination data set: {0}".format(str(err)), dest_exists=dest_exists ) + if converted_src: + if remote_src: + src = original_src + else: + temp_path = original_temp + # ******************************************************************** # Encoding conversion is only valid if the source is a local file, # local directory or a USS file/directory. 
@@ -2433,35 +2547,8 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_SEQ: if src_ds_type == "USS" and not is_binary: - # Before copying into the destination dataset, we'll make sure that - # the source file doesn't contain any carriage returns that would - # result in empty records in the destination. - # Due to the differences between encodings, we'll normalize to IBM-037 - # before checking the EOL sequence. new_src = conv_path or temp_path or src - enc_utils = encode.EncodeUtils() - src_tag = enc_utils.uss_file_tag(new_src) - - if src_tag == "untagged": - src_tag = encode.Defaults.DEFAULT_EBCDIC_USS_CHARSET - - if src_tag not in encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET: - fd, converted_src = tempfile.mkstemp() - os.close(fd) - - enc_utils.uss_convert_encoding( - new_src, - converted_src, - src_tag, - encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET - ) - copy_handler._tag_file_encoding(converted_src, encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET) - new_src = converted_src - - if copy_handler.file_has_crlf_endings(new_src): - new_src = copy_handler.create_temp_with_lf_endings(new_src) - - conv_path = new_src + conv_path = normalize_line_endings(new_src, encoding) copy_handler.copy_to_seq( src, @@ -2484,7 +2571,14 @@ def run_module(module, arg_def): ) pdse_copy_handler.copy_to_pdse( - src, temp_path, conv_path, dest_name, src_ds_type, src_member=src_member, dest_member=dest_member + src, + temp_path, + conv_path, + dest_name, + src_ds_type, + src_member=src_member, + dest_member=dest_member, + encoding=encoding ) res_args["changed"] = True dest = dest.upper() diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index dbc76eeff..c5f660a6c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM 
Corporation 2020, 2021 +# Copyright (c) IBM Corporation 2020, 2021, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -41,6 +41,8 @@ DUMMY DATA ---- LINE 007 ------ """ +DUMMY_DATA_CRLF = b"00000001 DUMMY DATA\r\n00000002 DUMMY DATA\r\n" + VSAM_RECORDS = """00000001A record 00000002A record 00000003A record @@ -115,6 +117,12 @@ def populate_dir(dir_path): infile.write(DUMMY_DATA) +def populate_dir_crlf_endings(dir_path): + for i in range(5): + with open(os.path.join(dir_path, "file{0}".format(i)), "wb") as infile: + infile.write(DUMMY_DATA_CRLF) + + def populate_partitioned_data_set(hosts, name, ds_type, members=None): """Creates a new partitioned data set and inserts records into various members of it. @@ -1056,14 +1064,72 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): assert v_recl.get("rc") == 0 stdout = v_recl.get("stdout").split() assert len(stdout) == 5 + # Verifying the dataset type (sequential). assert stdout[1] == "PS" + # Verifying the record format is Fixed Block. assert stdout[2] == "FB" + # Verifying the record length is 31. The dummy data has 31 + # characters per line. 
assert stdout[3] == "31" finally: hosts.all.zos_data_set(name=dest, state="absent") os.remove(src) +@pytest.mark.uss +@pytest.mark.seq +def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): + hosts = ansible_zos_module + dest = "USER.TEST.SEQ.FUNCTEST" + + fd, src = tempfile.mkstemp() + os.close(fd) + with open(src, "wb") as infile: + infile.write(DUMMY_DATA_CRLF) + + try: + hosts.all.zos_data_set(name=dest, state="absent") + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=False, + is_binary=False + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + verify_recl = hosts.all.shell( + cmd="dls -l {0}".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert len(v_cp.get("stdout_lines")) == 2 + for v_recl in verify_recl.contacted.values(): + assert v_recl.get("rc") == 0 + stdout = v_recl.get("stdout").split() + assert len(stdout) == 5 + # Verifying the dataset type (sequential). + assert stdout[1] == "PS" + # Verifying the record format is Fixed Block. + assert stdout[2] == "FB" + # Verifying the record length is 19. The dummy data has 19 + # characters per line. 
+ assert stdout[3] == "19" + finally: + hosts.all.zos_data_set(name=dest, state="absent") + os.remove(src) + + @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ @@ -1651,6 +1717,38 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): hosts.all.zos_data_set(name=dest, state="absent") +@pytest.mark.uss +@pytest.mark.pdse +def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): + hosts = ansible_zos_module + dest = "USER.TEST.PDSE.FUNCTEST" + + temp_path = tempfile.mkdtemp() + src_basename = "source/" + source_path = "{0}/{1}".format(temp_path, src_basename) + + try: + os.mkdir(source_path) + populate_dir_crlf_endings(source_path) + + copy_res = hosts.all.zos_copy(src=source_path, dest=dest) + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}({1})'\"".format(dest, "FILE2"), + executable=SHELL_EXECUTABLE, + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == dest + for result in verify_copy.contacted.values(): + assert result.get("rc") == 0 + assert len(result.get("stdout_lines")) == 2 + finally: + shutil.rmtree(temp_path) + hosts.all.zos_data_set(name=dest, state="absent") + + @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src_type", ["pds", "pdse"]) From 673bd70349b3450066917de9d435effc316a4dff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 24 Apr 2023 15:14:54 -0600 Subject: [PATCH 101/413] Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. 
(#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../731-zos_linefile-disposition_share.yaml | 6 + plugins/modules/zos_lineinfile.py | 39 +++- .../modules/test_zos_blockinfile_func.py | 35 ++- .../modules/test_zos_lineinfile_func.py | 212 ++++++++++++++++-- tests/helpers/zos_blockinfile_helper.py | 194 +++++++++++++++- tests/helpers/zos_lineinfile_helper.py | 181 ++++++++++++++- 6 files changed, 640 insertions(+), 27 deletions(-) create mode 100644 changelogs/fragments/731-zos_linefile-disposition_share.yaml diff --git a/changelogs/fragments/731-zos_linefile-disposition_share.yaml b/changelogs/fragments/731-zos_linefile-disposition_share.yaml new file mode 100644 index 000000000..da6dbc19b --- /dev/null +++ 
b/changelogs/fragments/731-zos_linefile-disposition_share.yaml @@ -0,0 +1,6 @@ +minor_changes: +- zos_lineinfile - would access data sets with exclusive access so no other + task can read the data, this enhancement allows for a data set to be opened + with a disposition set to share so that other tasks can access the data when + option `force` is set to `true`. + (https://github.com/ansible-collections/ibm_zos_core/pull/731) \ No newline at end of file diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 7a26ce299..c2a7a719c 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -173,6 +173,18 @@ required: false type: str default: IBM-1047 + force: + description: + - Specifies that the data set can be shared with others during an update + which results in the data set you are updating to be simultaneously + updated by others. + - This is helpful when a data set is being used in a long running process + such as a started task and you are wanting to update or read. + - The C(force) option enables sharing of data sets through the disposition + I(DISP=SHR). + required: false + type: bool + default: false notes: - It is the playbook author or user's responsibility to avoid files that should not be encoded, such as binary files. 
A user is described @@ -218,6 +230,14 @@ regexp: '^(.*)User(\d+)m(.*)$' line: '\1APPUser\3' backrefs: yes + +- name: Add a line to a member while a task is in execution + zos_lineinfile: + src: SOME.PARTITIONED.DATA.SET(DATA) + insertafter: EOF + line: 'Should be a working test now' + force: True + """ RETURN = r""" @@ -271,7 +291,7 @@ DS_TYPE = ['PS', 'PO'] -def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs): +def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs, force): """Replace a line with the matching regex pattern Insert a line before/after the matching pattern Insert a line at BOF/EOF @@ -292,6 +312,7 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs encoding: {str} -- Encoding of the src. first_match: {bool} -- Take the first matching regex pattern. backrefs: {bool} -- Back reference + force: {bool} -- force for modify a member part of a task in execution Returns: str -- Information in JSON format. keys: @@ -310,10 +331,11 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs backref=backrefs, state=True, debug=True, + force=force, ) -def absent(src, line, regexp, encoding): +def absent(src, line, regexp, encoding, force): """Delete lines with matching regex pattern Arguments: @@ -322,6 +344,7 @@ def absent(src, line, regexp, encoding): regexp will be ignored. regexp: {str} -- The regular expression to look for in every line of the src. encoding: {str} -- Encoding of the src. + force: {bool} -- force for modify a member part of a task in execution Returns: str -- Information in JSON format. keys: @@ -329,7 +352,7 @@ def absent(src, line, regexp, encoding): found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the source was modified. 
""" - return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True) + return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True, force=force) def quotedString(string): @@ -364,7 +387,8 @@ def main(): backup_name=dict(type='str', required=False, default=None), firstmatch=dict(type='bool', default=False), encoding=dict(type='str', default="IBM-1047"), - tmp_hlq=dict(type='str', required=False, default=None) + tmp_hlq=dict(type='str', required=False, default=None), + force=dict(type='bool', required=False, default=False) ) module = AnsibleModule( argument_spec=module_args, @@ -385,6 +409,7 @@ def main(): firstmatch=dict(arg_type="bool", required=False, default=False), backrefs=dict(arg_type="bool", dependencies=['regexp'], required=False, default=False), tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), + force=dict(arg_type='bool', required=False, default=False), mutually_exclusive=[["insertbefore", "insertafter"]],) try: @@ -406,6 +431,7 @@ def main(): ins_bef = parsed_args.get('insertbefore') encoding = parsed_args.get('encoding') tmphlq = parsed_args.get('tmp_hlq') + force = parsed_args.get('force') if parsed_args.get('state') == 'present': if backrefs and regexp is None: @@ -453,9 +479,10 @@ def main(): # state=present, insert/replace a line with matching regex pattern # state=absent, delete lines with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, backrefs) + return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, + backrefs, force) else: - return_content = absent(src, quotedString(line), quotedString(regexp), encoding) + return_content = absent(src, quotedString(line), quotedString(regexp), encoding, force) stdout = 
return_content.stdout_response stderr = return_content.stderr_response rc = return_content.rc diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index f6b735487..7cd92c9e5 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -17,6 +17,8 @@ DsGeneral, DsNotSupportedHelper, DsGeneralResultKeyMatchesRegex, + DsGeneralForce, + DsGeneralForceFail, ) import os import sys @@ -238,6 +240,14 @@ test_ds_block_insertafter_eof_with_backup_name=dict( block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=MVS_BACKUP_DS), + test_ds_block_insertafter_regex_force=dict( + path="",insertafter="ZOAU_ROOT=", + block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + state="present", force=True), + test_ds_block_insertafter_regex_force_fail=dict( + path="",insertafter="ZOAU_ROOT=", + block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + state="present", force=False), expected=dict(test_uss_block_insertafter_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -1498,6 +1508,17 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, encodi ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") +@pytest.mark.ds +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): + TEST_ENV["DS_TYPE"] = dstype + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_INFO["test_ds_block_insertafter_regex_force"], + 
TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"] + ) + + ######################### # Negative tests ######################### @@ -1545,4 +1566,14 @@ def test_ds_not_supported(ansible_zos_module, dstype): DsNotSupportedHelper( TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], ansible_zos_module, TEST_ENV, TEST_INFO["test_uss_block_insertafter_regex"] - ) \ No newline at end of file + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): + TEST_ENV["DS_TYPE"] = dstype + DsGeneralForceFail( + ansible_zos_module, TEST_ENV, + TEST_INFO["test_ds_block_insertafter_regex_force_fail"], + ) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index c001ebb0d..7b77c155d 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -17,6 +17,8 @@ DsGeneral, DsNotSupportedHelper, DsGeneralResultKeyMatchesRegex, + DsGeneralForceFail, + DsGeneralForce, ) import os import sys @@ -120,6 +122,23 @@ test_ds_line_replace_nomatch_insertbefore_nomatch=dict(test_name="T11"), test_ds_line_absent=dict(test_name="T12"), test_ds_line_tmp_hlq_option=dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ"), + test_ds_line_force=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=True), + test_ds_line_force_fail=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=False), + test_ds_line_replace_force=dict(path="",regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", + state="present",force=True), + test_ds_line_insertafter_regex_force=dict(path="",insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", + state="present",force=True), + test_ds_line_insertbefore_regex_force=dict(path="",insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present",force=True), + 
test_ds_line_insertbefore_bof_force=dict(path="",insertbefore="BOF", line="# this is file is for setting env vars", + state="present",force=True), + test_ds_line_replace_match_insertafter_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertafter="PATH=", + line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), + test_ds_line_replace_match_insertbefore_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", + state="present",force=True), + test_ds_line_replace_nomatch_insertafter_match_force=dict(path="",regexp="abcxyz", insertafter="ZOAU_ROOT=", + line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), + test_ds_line_replace_nomatch_insertbefore_match_force=dict(path="",regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", + state="present",force=True), expected=dict(test_uss_line_replace="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -568,7 +587,42 @@ export PYTHONPATH export PKG_CONFIG_PATH export PYTHON_HOME -export _BPXK_AUTOCVT"""), +export _BPXK_AUTOCVT""", + test_ds_line_force="""if [ -z STEPLIB ] && tty -s; +then + export STEPLIB=none + exec -a 0 SHELL +fi +TZ=PST8PDT +export TZ +LANG=C +export LANG +readonly LOGNAME +PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin +export PATH +LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +export LIBPATH +NLSPATH=/usr/lib/nls/msg/%L/%N +export NLSPATH +MANPATH=/usr/man/%L +export MANPATH +MAIL=/usr/mail/LOGNAME +export MAIL +umask 022 +ZOAU_ROOT=/usr/lpp/zoautil/v100 +ZOAUTIL_DIR=/usr/lpp/zoautil/v100 +PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig +PYTHON_HOME=/usr/lpp/izoda/v110/anaconda +_BPXK_AUTOCVT=ON +export ZOAU_ROOT +export ZOAUTIL_DIR +export ZOAUTIL_DIR +export PYTHONPATH +export PKG_CONFIG_PATH +export PYTHON_HOME +export _BPXK_AUTOCVT +export ZOAU_ROOT"""), ) ######################### 
@@ -708,20 +762,6 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): # Dataset test cases ######################### -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_INFO["test_uss_line_replace"]["line"] = 'ZOAU_ROOT=/mvsutil-develop_dsed' - DsGeneral( - TEST_INFO["test_ds_line_replace"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace"] - ) - - @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) @pytest.mark.parametrize("encoding", ENCODING) @@ -909,3 +949,145 @@ def test_ds_not_supported(ansible_zos_module, dstype): TEST_INFO["test_ds_line_replace"]["test_name"], ansible_zos_module, TEST_ENV, TEST_INFO["test_uss_line_replace"] ) + + +######################### +# Dataset test cases with force +######################### + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_force"], + TEST_INFO["expected"]["test_ds_line_force"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_force_fail(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForceFail( + ansible_zos_module, TEST_ENV, + TEST_INFO["test_ds_line_force_fail"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + 
TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_force"], + TEST_INFO["expected"]["test_uss_line_replace"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_insertafter_regex_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_insertafter_regex_force"], + TEST_INFO["expected"]["test_uss_line_insertafter_regex"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_insertbefore_regex_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_insertbefore_regex_force"], + TEST_INFO["expected"]["test_uss_line_insertbefore_regex"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_insertbefore_bof_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_insertbefore_bof_force"], + TEST_INFO["expected"]["test_uss_line_insertbefore_bof"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_match_insertafter_ignore_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_match_insertafter_ignore_force"], + 
TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_match_insertbefore_ignore_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_match_insertbefore_ignore_force"], + TEST_INFO["expected"]["test_uss_line_replace_match_insertbefore_ignore"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_nomatch_insertafter_match_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_nomatch_insertafter_match_force"], + TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertafter_match"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_nomatch_insertbefore_match_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_nomatch_insertbefore_match_force"], + TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertbefore_match"] + ) diff --git a/tests/helpers/zos_blockinfile_helper.py b/tests/helpers/zos_blockinfile_helper.py index 0a77e4eda..f5aa178fe 100644 --- a/tests/helpers/zos_blockinfile_helper.py +++ b/tests/helpers/zos_blockinfile_helper.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file 
except in compliance with the License. # You may obtain a copy of the License at @@ -14,12 +14,40 @@ from __future__ import absolute_import, division, print_function from shellescape import quote from pprint import pprint +import time import re __metaclass__ = type +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + + def set_uss_test_env(test_name, hosts, test_env): test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name try: @@ -117,8 +145,8 @@ def DsGeneral(test_name, ansible_zos_module, test_env, test_info, expected): results = hosts.all.shell(cmd=cmdStr) for result in results.contacted.values(): pprint(result) - assert result.get("stdout") == expected - # assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') + #assert result.get("stdout") == expected + assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') clean_ds_test_env(test_env["DS_NAME"], hosts) return blockinfile_results @@ -155,3 +183,163 @@ def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test for key in kwargs: assert re.match(kwargs.get(key), result.get(key)) clean_ds_test_env(test_env["DS_NAME"], hosts) + + +def DsGeneralForce(ansible_zos_module, test_env, test_info, expected): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + if test_env["DS_TYPE"] == "SEQ": + test_env["DS_NAME"] = 
DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + hosts = ansible_zos_module + try: + # set up: + # create pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": test_env["DS_NAME"], + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # write memeber to verify cases + # print(test_env["TEST_CONT"]) + if test_env["DS_TYPE"] in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) + if test_env["ENCODING"]: + test_info["encoding"] = test_env["ENCODING"] + hosts.all.shell(cmd=cmdStr) + cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + if test_env["ENCODING"] != 'IBM-1047': + hosts.all.zos_encode( + src=TEMP_FILE, + dest=test_env["DS_NAME"], + encoding={ + "from": "IBM-1047", + "to": test_env["ENCODING"], + }, + ) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # 
submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + blockinfile_results = hosts.all.zos_blockinfile(**test_info) + for result in blockinfile_results.contacted.values(): + assert result.get("changed") == True + + + if test_env["ENCODING"] == 'IBM-1047': + cmdStr = "cat \"//'{0}'\" ".format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + pprint(result) + assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') + else: + cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") == True + finally: + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + return blockinfile_results + + +def DsGeneralForceFail(ansible_zos_module, test_env, test_info): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + hosts = ansible_zos_module + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + try: + # set up: + # create pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "type": "member", + "state": 
"present", + "replace": True, + }, + ] + ) + # write memeber to verify cases + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_info["path"])) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + blockinfile_results = hosts.all.zos_blockinfile(**test_info) + for result in blockinfile_results.contacted.values(): + pprint(result) + assert result.get("changed") == False + assert result.get("failed") == True + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file diff --git a/tests/helpers/zos_lineinfile_helper.py b/tests/helpers/zos_lineinfile_helper.py index 2c695364b..bac392e80 100644 --- a/tests/helpers/zos_lineinfile_helper.py +++ b/tests/helpers/zos_lineinfile_helper.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -13,11 +13,38 @@ from __future__ import absolute_import, division, print_function from shellescape import quote +import time from pprint import pprint import re __metaclass__ = type +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + def set_uss_test_env(test_name, hosts, test_env): test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name @@ -159,3 +186,155 @@ def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test for key in kwargs: assert re.match(kwargs.get(key), result.get(key)) clean_ds_test_env(test_env["DS_NAME"], hosts) + + +def DsGeneralForce(ansible_zos_module, test_env, test_text, test_info, expected): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + if test_env["DS_TYPE"] == "SEQ": + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + hosts = ansible_zos_module + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_text, TEMP_FILE)) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": 
"present", + "replace": True, + }, + { + "name": test_env["DS_NAME"], + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # write memeber to verify cases + if test_env["DS_TYPE"] in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) + if test_env["ENCODING"]: + test_info["encoding"] = test_env["ENCODING"] + hosts.all.shell(cmd=cmdStr) + cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + if test_env["ENCODING"] != 'IBM-1047': + hosts.all.zos_encode( + src=TEMP_FILE, + dest=test_env["DS_NAME"], + encoding={ + "from": "IBM-1047", + "to": test_env["ENCODING"], + }, + ) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + # call line infile to see results + results = hosts.all.zos_lineinfile(**test_info) + pprint(vars(results)) + + if test_env["ENCODING"] == 'IBM-1047': + cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("stdout") == expected + else: + cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") 
== True + #assert result.get("stdout") == expected + + finally: + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + +def DsGeneralForceFail(ansible_zos_module, test_env, test_info): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + hosts = ansible_zos_module + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + try: + # set up: + # create pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + # pause to ensure c code acquires lock + time.sleep(5) + # call line infile to see results + results = hosts.all.zos_lineinfile(**test_info) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") == False + assert result.get("failed") == 
True + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From be97d432bc6af27b289b9a70a9e2c9bcc76f9308 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 24 Apr 2023 16:19:23 -0600 Subject: [PATCH 102/413] remove duplicate function (#753) --- plugins/module_utils/encode.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index fa84c6fb3..cfcfd2bf0 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -496,25 +496,6 @@ def uss_file_tag(self, file_path): except Exception: return None - def uss_tag_encoding(self, file_path, tag): - """Tag the file/directory specified with the given code set. - If `file_path` is a directory, all of the files and subdirectories will - be tagged recursively. - - Arguments: - file_path {str} -- Absolute file path to tag. - tag {str} -- Code set to tag the file/directory. - - Raises: - TaggingError: When the chtag command fails. 
- """ - is_dir = os.path.isdir(file_path) - - tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) - rc, out, err = self.module.run_command(tag_cmd) - if rc != 0: - raise TaggingError(file_path, tag, rc, out, err) - class EncodeError(Exception): def __init__(self, message): From 3878e6bb2138d77684c2b41049f0ce5e4963c3e3 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 26 Apr 2023 09:27:05 -0700 Subject: [PATCH 103/413] Update branch production branch Main with release v1.5.0 content (#756) (#758) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> --- CHANGELOG.rst | 117 +++++++---- README.md | 4 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 183 +++++++++++++++++- docs/source/release_notes.rst | 67 ++++++- galaxy.yml | 4 +- 
meta/ibm_zos_core_meta.yml | 2 +- meta/runtime.yml | 2 +- plugins/module_utils/encode.py | 2 +- plugins/module_utils/job.py | 2 +- plugins/modules/zos_gather_facts.py | 2 +- tests/dependencyfinder.py | 32 +++ .../modules/test_zos_job_submit_func.py | 2 +- 13 files changed, 366 insertions(+), 55 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 55555d11c..98cab36f3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,19 +5,18 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.5.0-beta.1 -============= +v1.5.0 +====== Release Summary --------------- -Release Date: '2022-11-17' +Release Date: '2023-04-21' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Major Changes ------------- @@ -66,22 +65,35 @@ Deprecated Features Bugfixes -------- +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/641) +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/746) - zos_copy - Fixes a bug such that the module fails when copying files from a directory needing also to be encoded. The failure would also delete the `src` which was not desirable behavior. Fixes deletion of src on encoding error. (https://github.com/ansible-collections/ibm_zos_core/pull/321). -- zos_copy - Fixes wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. 
(https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - Fixes a bug where copying a member from a loadlib to another loadlib fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) +- zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/725) - zos_copy - Fixes a bug where if a destination has accented characters in its content, the module would fail when trying to determine if it is empty. (https://github.com/ansible-collections/ibm_zos_core/pull/634) +- zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/725) - zos_copy - Fixes a bug where the computed record length for a new destination dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) +- zos_copy - Fixes wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - module was updated to correct a bug in the case when the destination (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module would fail in determining if the PDSE actually existed and try to create it when it already existed resulting in an error that would prevent the module from correctly executing. (https://github.com/ansible-collections/ibm_zos_core/pull/327) - zos_data_set - Fixes a bug such that the module will delete a catalogued data set over an uncatalogued data set even though the volume is provided for the uncataloged data set. 
This is unexpected behavior and does not align to documentation; correct behavior is that when a volume is provided that is the first place the module should look for the data set, whether or not it is cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/325). -- zos_data_set - Fixes a bug where the default record format FB was actually never enforced and when enforced it would cause VSAM creation to fail with a Dynalloc failure. This also cleans up some of the options that are set by default when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) +- zos_data_set - Fixes a bug where the default record format FB was actually never enforced and when enforced it would cause VSAM creation to fail with a Dynalloc failure. Also cleans up some of the options that are set by default when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) - zos_fetch - Updates the modules behavior when fetching VSAM data sets such that the maximum record length is now determined when creating a temporary data set to copy the VSAM data into and a variable-length (VB) data set is used. (https://github.com/ansible-collections/ibm_zos_core/pull/350) - zos_job_output - Fixes a bug that returned all ddname's when a specific ddnamae was provided. Now a specific ddname can be returned and all others ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/334) - zos_job_query - was updated to correct a boolean condition that always evaluated to "CANCELLED". (https://github.com/ansible-collections/ibm_zos_core/pull/312). +- zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would result in a `type` error that a stack trace would result in the response, issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job encounters a security exception no job log would result in the response, issue 684. 
(https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` to return a response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that no job log would result in the response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is purged by the system that a stack trace would result in the response, issue 681. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that a stack trace would result in the response, issue 623. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when resources (data sets) identified in JCL did not exist such that a stack trace would result in the response, issue 624. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue where the response did not include the job log when a non-zero return code would occur, issue 655. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - zos_mount - Fixes option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/511) - zos_mvs_raw - module was updated to correct a bug when no DD statements were provided. The module when no option was provided for `dds` would error, a default was provided to correct this behavior. (https://github.com/ansible-collections/ibm_zos_core/pull/336) - zos_operator - Fixes case sensitive error checks, invalid, error & unidentifiable (https://github.com/ansible-collections/ibm_zos_core/issues/389). 
- zos_operator - Fixes such that specifying wait_time_s would throw an error (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_operator - Fixes the wait_time_s to default to 1 second (https://github.com/ansible-collections/ibm_zos_core/issues/389). +- zos_operator - fixed incorrect example descriptions and updated the doc to highlight the deprecated option `wait`. (https://github.com/ansible-collections/ibm_zos_core/pull/648) - zos_operator - was updated to correct missing verbosity content when the option verbose was set to True. zos_operator - was updated to correct the trailing lines that would appear in the result content. (https://github.com/ansible-collections/ibm_zos_core/pull/400). New Modules @@ -89,50 +101,41 @@ New Modules - ibm.ibm_zos_core.zos_gather_facts - Gather z/OS system facts. -v1.4.0-beta.2 -============= +v1.4.1 +====== Release Summary --------------- -Release Date: '2022-10-17' +Release Date: '2023-04-18' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ -Minor Changes -------------- - -- zos_copy - enhanced the force option when `force=true` and the remote file or data set `dest` is NOT empty, the `dest` will be deleted and recreated with the `src` data set attributes, otherwise it will be recreated with the `dest` data set attributes. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - fixes a bug that when a directory is copied from the controller to the managed node and a mode is set, the mode is applied to the directory on the managed node. If the directory being copied contains files and mode is set, mode will only be applied to the files being copied not the pre-existing files. 
(https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - fixes a bug where options were not defined in the module argument spec that will result in error when running `ansible-core` v2.11 and using options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/496) -- zos_copy - introduced an updated creation policy referred to as precedence rules such that if `dest_data_set` is set, this will take precedence. If `dest` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. If no precedent rule has been exercised, `dest` will be created with the same attributes of `src`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - introduced new computation capabilities such that if `dest` is a nonexistent data set, the attributes assigned will depend on the type of `src`. If `src` is a USS file, `dest` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If `src` is binary, `dest` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - option `dest_dataset` has been deprecated and removed in favor of the new option `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - was enhanced for when `src` is a directory and ends with "/", the contents of it will be copied into the root of `dest`. It it doesn't end with "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/496) - Bugfixes -------- -- zos_copy - fixes a bug that did not create a data set on the specified volume. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - fixes a bug where a number of attributes were not an option when using `dest_data_set`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_job_output - fixes a bug that returned all ddname's when a specific ddname was provided. Now a specific ddname can be returned and all others ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/507) -- zos_mount - fixed option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/502) -- zos_operator - enhanced to allow for MVS operator `SET` command, `SET` is equivalent to the abbreviated `T` command. (https://github.com/ansible-collections/ibm_zos_core/pull/501) +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/640) +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/742) +- zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/732) +- zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/732) +- zos_copy - fixed wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) +- zos_copy - fixes a bug where the computed record length for a new destination dataset would include newline characters. 
(https://github.com/ansible-collections/ibm_zos_core/pull/620) +- zos_job_query - fixes a bug where a boolean was not being properly compared. (https://github.com/ansible-collections/ibm_zos_core/pull/379) -v1.4.0-beta.1 -============= +v1.4.0 +====== Release Summary --------------- -Release Date: '2021-06-23' -This changlelog describes all changes made to the modules and plugins included -in this collection. -For additional details such as required dependencies and availablity review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ +Release Date: '2022-12-07' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ Major Changes @@ -150,6 +153,20 @@ Major Changes - zos_operator_action_query response messages were improved with more diagnostic information in the event an error is encountered. - zos_ping was updated to remove the need for the zos_ssh connection plugin dependency. +Minor Changes +------------- + +- zos_copy - enhanced the force option when `force=true` and the remote file or data set `dest` is NOT empty, the `dest` will be deleted and recreated with the `src` data set attributes, otherwise it will be recreated with the `dest` data set attributes. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - enhanced to optimize how it captures the permission bits state for the `dest`. This change now reviews the source files instead of traversing the entire `dest` path. (https://github.com/ansible-collections/ibm_zos_core/pull/561) +- zos_copy - enhanced to support creating a parent directory when it does not exist in the `dest` path. 
Prior to this change, if a parent directory anywhere in the path did not exist the task would fail as it was stated in documentation. (https://github.com/ansible-collections/ibm_zos_core/pull/561) +- zos_copy - enhanced to support system symbols in PARMLIB. System symbols are elements that allow different z/OS® systems to share PARMLIB definitions while retaining unique values in those definitions. This was fixed in a future release through the use of one of the ZOAU dependency but this version of `ibm_zos_core` does not support that dependency version so this support was added. (https://github.com/ansible-collections/ibm_zos_core/pull/566) +- zos_copy - fixes a bug that when a directory is copied from the controller to the managed node and a mode is set, the mode is applied to the directory on the managed node. If the directory being copied contains files and mode is set, mode will only be applied to the files being copied not the pre-existing files. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - fixes a bug where options were not defined in the module argument spec that will result in error when running `ansible-core` v2.11 and using options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/496) +- zos_copy - introduced an updated creation policy referred to as precedence rules such that if `dest_data_set` is set, this will take precedence. If `dest` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. If no precedent rule has been exercised, `dest` will be created with the same attributes of `src`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - introduced new computation capabilities such that if `dest` is a nonexistent data set, the attributes assigned will depend on the type of `src`. If `src` is a USS file, `dest` will have a Fixed Block (FB) record format and the remaining attributes will be computed. 
If `src` is binary, `dest` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - option `dest_dataset` has been deprecated and removed in favor of the new option `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - was enhanced for when `src` is a directory and ends with "/", the contents of it will be copied into the root of `dest`. If it doesn't end with "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/496) + Deprecated Features ------------------- @@ -160,7 +177,12 @@ Deprecated Features Bugfixes -------- +- zos_copy - fixes a bug that did not create a data set on the specified volume. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - fixes a bug where a number of attributes were not an option when using `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_job_output - fixes a bug that returned all ddname's when a specific ddname was provided. Now a specific ddname can be returned and all others ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/507) - zos_job_output was updated to correct possible truncated responses for the ddname content. This would occur for jobs with very large amounts of content from a ddname. +- zos_mount - fixed option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/502) +- zos_operator - enhanced to allow for MVS operator `SET` command, `SET` is equivalent to the abbreviated `T` command. (https://github.com/ansible-collections/ibm_zos_core/pull/501) - zos_ssh - connection plugin was updated to correct a bug in Ansible that would result in playbook task retries overriding the SSH connection retries. 
This is resolved by renaming the zos_ssh option @@ -175,7 +197,36 @@ New Modules - ibm.ibm_zos_core.zos_mount - Mount a z/OS file system. -v1.3.4 +v1.3.6 +====== + +Release Summary +--------------- + +Release Date: '2022-10-07' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + + +Minor Changes +------------- + +- zos_copy - was enhanced for when `src` is a directory and ends with "/", the contents of it will be copied into the root of `dest`. If it doesn't end with "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/515) + +Bugfixes +-------- + +- jobs.py - fixes a utility used by module `zos_job_output` that would truncate the DD content. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_copy - fixes a bug that when a directory is copied from the controller to the managed node and a mode is set, the mode is now applied to the directory on the controller. If the directory being copied contains files and mode is set, mode will only be applied to the files being copied not the pre-existing files.(https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_copy - fixes a bug where options were not defined in the module argument spec that will result in error when running `ansible-core` 2.11 and using options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_fetch - fixes a bug where an option was not defined in the module argument spec that will result in error when running `ansible-core` 2.11 and using option `encoding`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_job_submit - fixes a bug where an option was not defined in the module argument spec that will result in error when running `ansible-core` 2.11 and using option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_ssh - fixes connection plugin which will error when using `ansible-core` 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_ssh - fixes connection plugin which will error when using `ansible-core` 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. (https://github.com/ansible-collections/ibm_zos_core/pull/513) + +v1.3.5 ====== Release Summary diff --git a/README.md b/README.md index 746b27f4d..d6505759b 100644 --- a/README.md +++ b/README.md @@ -49,11 +49,11 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against the following Ansible versions: >=2.9,<=2.14.1. +This collection has been tested against the following Ansible versions: >=2.9,<2.15. Copyright ========= -© Copyright IBM Corporation 2020-2021. +© Copyright IBM Corporation 2020-2023. 
License ======= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 1f8b2439a..e2cdc5634 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -109,4 +109,4 @@ plugins: shell: {} strategy: {} vars: {} -version: 1.5.0-beta.1 +version: 1.5.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index f843ea577..0e5580863 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -232,7 +232,7 @@ releases: - v1.3.3_summary.yml - v1.3.3_summary_bugs.yml release_date: '2022-06-07' - 1.3.4: + 1.3.5: changes: bugfixes: - "zos_ssh - connection plugin was updated to correct a bug in Ansible that\n @@ -251,6 +251,79 @@ releases: - 328-rename-retries-to-reconnection_retries.yml - v1.3.4_summary.yml release_date: '2022-06-07' + 1.3.6: + changes: + bugfixes: + - jobs.py - fixes a utility used by module `zos_job_output` that would truncate + the DD content. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_copy - fixes a bug that when a directory is copied from the controller + to the managed node and a mode is set, the mode is now applied to the directory + on the controller. If the directory being copied contains files and mode is + set, mode will only be applied to the files being copied not the pre-existing + files.(https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_copy - fixes a bug where options were not defined in the module argument + spec that will result in error when running `ansible-core` 2.11 and using + options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_fetch - fixes a bug where an option was not defined in the module argument + spec that will result in error when running `ansible-core` 2.11 and using + option `encoding`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_job_submit - fixes a bug where an option was not defined in the module + argument spec that will result in error when running `ansible-core` 2.11 and + using option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_ssh - fixes connection plugin which will error when using `ansible-core` + 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. + (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_ssh - fixes connection plugin which will error when using `ansible-core` + 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. + (https://github.com/ansible-collections/ibm_zos_core/pull/513) + minor_changes: + - zos_copy - was enhanced for when `src` is a directory and ends with "/", the + contents of it will be copied into the root of `dest`. If it doesn't end with + "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/515) + release_summary: "Release Date: '2022-10-07'\nThis changelog describes all changes + made to the modules and plugins included\nin this collection. The release + date is the date the changelog is created.\nFor additional details such as + required dependencies and availability review\nthe collections `release notes + <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + \n" + fragments: + - 462-copy-fetch-submit-utils.yml + - 513-zos_ssh-support-ansible-2.11.yml + - 515-copy-support-directories.yml + - v1.3.6_summary.yml + release_date: '2022-10-07' + 1.4.0: + changes: + minor_changes: + - zos_copy - enhanced to optimize how it captures the permission bits state + for the `dest`. This change now reviews the source files instead of traversing + the entire `dest` path. 
(https://github.com/ansible-collections/ibm_zos_core/pull/561) + - zos_copy - enhanced to support creating a parent directory when it does not + exist in the `dest` path. Prior to this change, if a parent directory anywhere + in the path did not exist the task would fail as it was stated in documentation. + (https://github.com/ansible-collections/ibm_zos_core/pull/561) + - "zos_copy - enhanced to support system symbols in PARMLIB. System symbols + are elements that allow different z/OS\xAE systems to share PARMLIB definitions + while retaining unique values in those definitions. This was fixed in a future + release through the use of one of the ZOAU dependency but this version of + `ibm_zos_core` does not support that dependency version so this support was + added. (https://github.com/ansible-collections/ibm_zos_core/pull/566)" + release_summary: 'Release Date: ''2022-12-07'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + + ' + fragments: + - 561-update-directory-create.yml + - 566-update-with-symbol-support.yml + - v1.4.0_summary.yml + release_date: '2022-12-07' 1.4.0-beta.1: changes: bugfixes: @@ -378,6 +451,102 @@ releases: - 507-display-specific-ddname.yml - v1.4.0-beta.2_summary.yml release_date: '2022-10-13' + 1.4.1: + changes: + bugfixes: + - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/640) + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. 
(https://github.com/ansible-collections/ibm_zos_core/pull/742) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/732) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/732) + - zos_copy - fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) + - zos_copy - fixes a bug where the computed record length for a new destination + dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) + - zos_job_query - fixes a bug where a boolean was not being properly compared. + (https://github.com/ansible-collections/ibm_zos_core/pull/379) + release_summary: 'Release Date: ''2023-04-18'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + + ' + fragments: + - 579-zos-query-boolean-correction.yml + - 588-update-emergency-backup.yml + - 599-copy-carriage-return.yml + - 601-copy-loadlib-member.yml + - 728-zos_operator-example-updates.yml + - 732-zos_copy-encoding-bugs.yml + - 742_zos_copy-mode-is-applied-to-the-destination-directory-a-deviation-from-the-communtiy-module-behavior.yaml + - v1.4.1_summary.yml + release_date: '2023-04-18' + 1.5.0: + changes: + bugfixes: + - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/641) + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/746) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/725) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/725) + - zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would + result in a `type` error that a stack trace would result in the response, + issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job encounters a security exception + no job log would result in the response, issue 684. 
(https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` + to return a response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that no job log would result in the response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is purged by the system that a + stack trace would result in the response, issue 681. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that + a stack trace would result in the response, issue 623. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when resources (data sets) identified in + JCL did not exist such that a stack trace would result in the response, issue + 624. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue where the response did not include the job + log when a non-zero return code would occur, issue 655. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_operator - fixed incorrect example descriptions and updated the doc to + highlight the deprecated option `wait`. (https://github.com/ansible-collections/ibm_zos_core/pull/648) + release_summary: 'Release Date: ''2023-04-21'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 641-copy-loadlib-member.yml + - 648-zos_operator-examples.yml + - 663-zos_gather_facts-update-docstring.yml + - 683-zos_job_submit-bugs.yml + - 725-zos_copy-encoding-bugs.yml + - 729-zos_operator-example-added.yml + - 739-zos_copy-volume-symbol-test.yml + - 746--Mode-set-for-files-is-applied-to-destination-directory.yml + - v1.5.0_summary.yml + release_date: '2023-04-21' 1.5.0-beta.1: changes: bugfixes: @@ -385,15 +554,15 @@ releases: a directory needing also to be encoded. The failure would also delete the `src` which was not desirable behavior. Fixes deletion of src on encoding error. (https://github.com/ansible-collections/ibm_zos_core/pull/321). - - zos_copy - Fixes wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system to - its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - Fixes a bug where copying a member from a loadlib to another loadlib fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) - zos_copy - Fixes a bug where if a destination has accented characters in its content, the module would fail when trying to determine if it is empty. (https://github.com/ansible-collections/ibm_zos_core/pull/634) - zos_copy - Fixes a bug where the computed record length for a new destination dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) + - zos_copy - Fixes wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. 
(https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - module was updated to correct a bug in the case when the destination (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module would fail in determining if the PDSE actually existed and try to create it @@ -406,8 +575,8 @@ releases: the module should look for the data set, whether or not it is cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/325). - zos_data_set - Fixes a bug where the default record format FB was actually never enforced and when enforced it would cause VSAM creation to fail with - a Dynalloc failure. Also cleans up some of the options that are set by - default when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) + a Dynalloc failure. Also cleans up some of the options that are set by default + when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) - zos_fetch - Updates the modules behavior when fetching VSAM data sets such that the maximum record length is now determined when creating a temporary data set to copy the VSAM data into and a variable-length (VB) data set is @@ -515,7 +684,7 @@ releases: behaviors and reduces the possibility to encounter a permissions issue. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_job_submit - was updated to include an additional error code condition JCLERR. (https://github.com/ansible-collections/ibm_zos_core/pull/312) - - zos_lineinfile- updates the module with a new option named tmp_hlq. This allows + - zos_lineinfile - updates the module with a new option named tmp_hlq. This allows for a user to specify the data set high level qualifier (HLQ) used in any temporary data set created by the module. Often, the defaults are not permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). 
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 788f96b73..ab1e07e49 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,7 +6,7 @@ Releases ======== -Version 1.5.0-beta.1 +Version 1.5.0 ==================== New Modules @@ -61,7 +61,7 @@ Minor Changes Bugfixes -------- -- ``ibm_zos_copy`` +- ``zos_copy`` - fixes a bug such that the module fails when copying files from a directory needing also to be encoded. The failure would also delete the `src` which was not desirable behavior. Fixes deletion of src on encoding error. - module was updated to correct a bug in the case when the destination (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module would fail in determining if the PDSE actually existed and try to create it when it already existed resulting in an error that would prevent the module from correctly executing. @@ -69,6 +69,10 @@ Bugfixes - fixes a bug where if a destination has accented characters in its content, the module would fail when trying to determine if it is empty. - fixes a bug where copying a member from a loadlib to another loadlib fails. - fixed wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. + - copy failed from a loadlib member to another loadlib member. Fix now looks for an error in stdout while copying to perform a fallback copy for executables. + - fixes a bug where the module would change the mode for a directory when copying into it the contents of another. + - fixes a bug where source files not encoded in IBM-1047 would trigger an encoding error while computing the record length for a new destination dataset. + - fixes a bug where the code for fixing an issue with newlines in files would use the wrong encoding for normalization. 
- ``zos_data_set`` - Fixes a bug such that the module will delete a catalogued data set over an uncatalogued data set even though the volume is provided for the uncataloged data set. This is unexpected behavior and does not align to documentation; correct behavior is that when a volume is provided that is the first place the module should look for the data set, whether or not it is cataloged. @@ -76,6 +80,16 @@ Bugfixes - ``zos_fetch`` - Updates the modules behavior when fetching VSAM data sets such that the maximum record length is now determined when creating a temporary data set to copy the VSAM data into and a variable-length (VB) data set is used. - ``zos_job_output`` - fixes a bug that returned all ddname's when a specific ddnamae was provided. Now a specific ddname can be returned and all others ignored. - ``zos_job_query`` - was updated to correct a boolean condition that always evaluated to "CANCELLED". +- ``zos_job_submit`` + + - fixes the issue when `wait_time_s` was set to 0 that would result in a `type` error and the response would be a stack trace. + - fixes the issue when a job encounters a security exception, no job log would would result in the response. + - fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` to return a response. + - fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that no job log would result in the response. + - fixes the issue when a job is purged by the system that the response would result in a stack trace. + - fixes the issue when invalid JCL syntax is submitted such that the response would result in a stack trace. + - fixes the issue when resources (data sets) identified in JCL did not exist such that a response would result in a stack trace. + - fixes the issue where the response did not include the job log when a non-zero return code would occur. 
- ``zos_mount`` - fixed option `tag_ccsid` to correctly allow for type int. - ``zos_mvs_raw`` - module was updated to correct a bug when no DD statements were provided. The module when no option was provided for `dds` would error, a default was provided to correct this behavior. - ``zos_operator`` @@ -84,6 +98,7 @@ Bugfixes - fixed such that specifying wait_time_s would throw an error. - fixed the wait_time_s to default to 1 second. - was updated to correct missing verbosity content when the option verbose was set to True. zos_operator - was updated to correct the trailing lines that would appear in the result content. + - fixed incorrect example descriptions and updated the doc to highlight the deprecated option `wait`. Deprecated Features ------------------- @@ -95,6 +110,7 @@ Deprecated Features Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ @@ -104,7 +120,48 @@ Reference * Supported by `z/OS V2R3`_ or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.x`_ +* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. + +Version 1.4.1 +============= + +Bug fixes + +-------------------------- + +* ``zos_copy`` + + * Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. + * Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. + * Fixes a bug where the code for fixing an issue with newlines in + files. + * fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. + * fixes a bug where the computed record length for a new destination + dataset would include newline characters. 
+ +* ``zos_job_query`` + + * fixes a bug where a boolean was not being properly compared. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ +* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and + `Z Open Automation Utilities 1.1.1`_ Version 1.4.0 ============= @@ -744,10 +801,10 @@ Reference .. _3.11: https://www.ibm.com/docs/en/python-zos/3.11 .. _Z Open Automation Utilities 1.1.0: - https://www.ibm.com/docs/en/zoau/1.1.0 + https://www.ibm.com/docs/en/zoau/1.1.x .. _Z Open Automation Utilities 1.1.1: https://www.ibm.com/docs/en/zoau/1.1.1 -.. _Z Open Automation Utilities 1.2.x: +.. _Z Open Automation Utilities 1.2.2: https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm diff --git a/galaxy.yml b/galaxy.yml index 14cca831b..8aaf403db 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.5.0-beta.1 +version: 1.5.0 # Collection README file readme: README.md @@ -18,6 +18,8 @@ authors: - Ketan Kelkar <ketan.kelkar@ibm.com> - Ivan Moreno <ivan.moreno.soto@ibm.com> - Oscar Fernando Flores Garcia<fernando.flores@ibm.com> + - Jenny Huang <jennyhuang@ibm.com> + - Marcel Guitierrez <andre.marcel.gutierrez@ibm.com> # Description description: The IBM z/OS core collection includes connection plugins, action plugins, modules, filters and ansible-doc to automate tasks on z/OS. 
diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 1459bc478..c2aab577a 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.5.0-beta.1" +version: "1.5.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/meta/runtime.yml b/meta/runtime.yml index 43bbe4509..dbba1c7ce 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9' +requires_ansible: '>=2.9,<2.15' diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index cfcfd2bf0..a96bf46d5 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 6e37d5823..9af6260f4 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index beff12cd2..a3475be11 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 +# Copyright (c) IBM Corporation 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/dependencyfinder.py b/tests/dependencyfinder.py index 13f1e4391..fa24811ff 100755 --- a/tests/dependencyfinder.py +++ b/tests/dependencyfinder.py @@ -450,6 +450,28 @@ def build_artifacts_from_collection(collection_root): return artifacts +def get_all_tests(collection_root): + """Build a list of all test cases for when all tests need to be run + Args: + collection_root (str): The path to the root of the collection + Returns: + list[tests]: A list of test cases. + """ + + files = [] + files += get_all_files_in_dir_tree(collection_root + "/tests/unit") + files += get_all_files_in_dir_tree(collection_root + "/tests/functional") + + test_suites = [] + for file in files: + if file.endswith(".py"): + path, filename = os.path.split(file) + if filename.startswith('test'): + test_suites.append(file) + + return test_suites + + def get_all_files_in_dir_tree(base_path): """Recursively search subdirectories for files. @@ -620,6 +642,14 @@ def parse_arguments(): default=False, help="Detect only the changes from the branch request-pull.", ) + parser.add_argument( + "-a", + "--all", + required=False, + action="store_true", + default=False, + help="A list of all test cases minus any skipped tests.", + ) args = parser.parse_args() return args @@ -635,6 +665,8 @@ def parse_arguments(): if args.minimum: changed_files = get_changed_plugins(args.path, args.branch) + elif args.all: + changed_files = get_all_tests(args.path) else: changed_files = get_changed_files(args.path, args.branch) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 888281712..3364d12da 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, 
Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 0e552400cf1c44db2e156d0f7b430f813e37546f Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 28 Apr 2023 13:45:02 -0700 Subject: [PATCH 104/413] Merge master to dev for 1.6.0 beta.1 (#763) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support 
branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no 
longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in 
ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no 
longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template 
Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. 
* expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: 
Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- CHANGELOG.rst | 45 +++ changelogs/.plugin-cache.yaml | 9 +- changelogs/changelog.yaml | 85 +++++- docs/source/modules/zos_blockinfile.rst | 14 +- docs/source/modules/zos_data_set.rst | 39 +++ docs/source/modules/zos_job_query.rst | 2 +- docs/source/modules/zos_lineinfile.rst | 19 ++ docs/source/modules/zos_tso_command.rst | 25 ++ docs/source/modules/zos_volume_init.rst | 257 ++++++++++++++++++ docs/source/release_notes.rst | 50 +++- galaxy.yml | 4 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_fetch.py | 2 +- plugins/module_utils/system.py | 2 +- plugins/modules/zos_volume_init.py | 6 +- .../functional/modules/test_zos_copy_func.py | 2 - .../modules/test_zos_data_set_func.py | 2 +- .../functional/modules/test_zos_find_func.py | 2 
+- .../modules/test_zos_job_query_func.py | 2 +- .../modules/test_zos_lineinfile_func.py | 2 +- 20 files changed, 543 insertions(+), 28 deletions(-) create mode 100644 docs/source/modules/zos_volume_init.rst diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 98cab36f3..c19a39bbc 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,51 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.6.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2023-04-26' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. (https://github.com/ansible-collections/ibm_zos_core/pull/654) + +Minor Changes +------------- + +- Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning".. warn Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) +- module_utils - job.py utility did not support positional wiled card placement, this enhancement uses `fnmatch` logic to support wild cards. +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) +- zos_copy - was enhanced to keep track of modified members in a destination dataset, restoring them to their previous state in case of a failure. 
(https://github.com/ansible-collections/ibm_zos_core/pull/551) +- zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). +- zos_job_query - ansible module does not support positional wild card placement for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_lineinfile - would access data sets with exclusive access so no other task can read the data, this enhancement allows for a data set to be opened with a disposition set to share so that other tasks can access the data when option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) +- zos_tso_command - was enhanced to accept `max_rc` as an option. This option allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) + +Bugfixes +-------- + +- Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". +- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an approriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_encode - fixes a bug where converted files were not tagged afterwards with the new code set. 
(https://github.com/ansible-collections/ibm_zos_core/pull/534) +- zos_find - fixes a bug where find result values stopped being returned after first value in a list was 'not found'. (https://github.com/ansible-collections/ibm_zos_core/pull/668) +- zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) + +New Modules +----------- + +- ibm.ibm_zos_core.zos_volume_init - Initialize volumes or minidisks. + v1.5.0 ====== diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index e2cdc5634..2c3c67c65 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -6,6 +6,7 @@ plugins: callback: {} cliconf: {} connection: {} + filter: {} httpapi: {} inventory: {} lookup: {} @@ -105,8 +106,14 @@ plugins: name: zos_tso_command namespace: '' version_added: 1.1.0 + zos_volume_init: + description: Initialize volumes or minidisks. + name: zos_volume_init + namespace: '' + version_added: 1.6.0 netconf: {} shell: {} strategy: {} + test: {} vars: {} -version: 1.5.0 +version: 1.6.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 0e5580863..51bba3c4f 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -684,10 +684,10 @@ releases: behaviors and reduces the possibility to encounter a permissions issue. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_job_submit - was updated to include an additional error code condition JCLERR. (https://github.com/ansible-collections/ibm_zos_core/pull/312) - - zos_lineinfile - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. 
(https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_lineinfile - updates the module with a new option named tmp_hlq. This + allows for a user to specify the data set high level qualifier (HLQ) used + in any temporary data set created by the module. Often, the defaults are not + permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - zos_mount - updates the module with a new option named tmp_hlq. This allows for a user to specify the data set high level qualifier (HLQ) used in any temporary data set created by the module. Often, the defaults are not permitted @@ -760,3 +760,80 @@ releases: name: zos_gather_facts namespace: '' release_date: '2022-11-02' + 1.6.0-beta.1: + changes: + bugfixes: + - Fixed wrong error message when a USS source is not found, aligning with a + similar error message from zos_blockinfile "{src} does not exist". + - zos_blockinfile - was unable to use double quotes which prevented some use + cases and did not display an approriate message. The fix now allows for double + quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) + - zos_encode - fixes a bug where converted files were not tagged afterwards + with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) + - zos_find - fixes a bug where find result values stopped being returned after + first value in a list was 'not found'. 
(https://github.com/ansible-collections/ibm_zos_core/pull/668) + - zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed + to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) + major_changes: + - zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. + (https://github.com/ansible-collections/ibm_zos_core/pull/654) + minor_changes: + - Updated the text converter import from "from ansible.module_utils._text" to + "from ansible.module_utils.common.text.converters" to remove warning".. warn + Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) + - module_utils - job.py utility did not support positional wiled card placement, + this enhancement uses `fnmatch` logic to support wild cards. + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) + - zos_copy - was enhanced to keep track of modified members in a destination + dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) + - zos_data_set - add force parameter to enable member delete while pdse is in + use (https://github.com/ansible-collections/ibm_zos_core/pull/718). + - zos_job_query - ansible module does not support positional wild card placement + for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout + the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) + - zos_lineinfile - would access data sets with exclusive access so no other + task can read the data, this enhancement allows for a data set to be opened + with a disposition set to share so that other tasks can access the data when + option `force` is set to `true`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/731) + - zos_tso_command - was enhanced to accept `max_rc` as an option. This option + allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) + release_summary: 'Release Date: ''2023-04-26'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 309-replace-text-zos-encode.yml + - 323-zos-job-query-handle-multiple-wildcards.yml + - 358-zos-data-set-support-disposition-shr.yml + - 408-restore-members-on-failure.yml + - 417-can-quotes-in-content-can-be-supported.yml + - 574-zos_find_stoppedonnotfound.yml + - 584-zos_lineinfile-error-message.yml + - 602-text-converter-import.yml + - 619-Mode-set-for-files-is-applied-to-destination-directory.yml + - 654-new-module-zos_volume_init.yml + - 659-zos-lineinfile-f-string.yml + - 666-zos_tso_command_maxrc.yml + - 727-zos-blockinfile-examples.yml + - 731-zos_linefile-disposition_share.yaml + - 734-copy-loadlib-member-test-case.yml + - 740-zos_copy-volume-symbol-test.yml + - 743-zos_copy-encoding-bugs.yml + - v1.6.0-beta.1_summary.yml + modules: + - description: Initialize volumes or minidisks. 
+ name: zos_volume_init + namespace: '' + release_date: '2023-04-26' diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 5608a0ebb..3633620ad 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -195,13 +195,11 @@ Examples block: | MOUNT FILESYSTEM('SOME.DATA.SET') TYPE(ZFS) MODE(READ) MOUNTPOINT('/tmp/src/somedirectory') - - name: Remove a library as well as surrounding markers zos_blockinfile: state: absent src: SYS1.PARMLIB(PROG00) marker: "/* {mark} ANSIBLE MANAGED BLOCK FOR SOME.DATA.SET */" - - name: Add ZOAU path to PATH in /etc/profile zos_blockinfile: src: /etc/profile @@ -210,7 +208,6 @@ Examples ZOAU=/path/to/zoau_dir/bin export ZOAU PATH=$ZOAU:$PATH - - name: Insert/Update HTML surrounded by custom markers after <body> line zos_blockinfile: path: /var/www/html/index.html @@ -219,13 +216,11 @@ Examples block: | <h1>Welcome to {{ ansible_hostname }}</h1> <p>Last updated on {{ ansible_date_time.iso8601 }}</p> - - name: Remove HTML as well as surrounding markers zos_blockinfile: path: /var/www/html/index.html state: absent marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->" - - name: Add mappings to /etc/hosts zos_blockinfile: path: /etc/hosts @@ -236,7 +231,6 @@ Examples - { name: host1, ip: 10.10.1.10 } - { name: host2, ip: 10.10.1.11 } - { name: host3, ip: 10.10.1.12 } - - name: Add a code block to a member using a predefined indentation. zos_blockinfile: path: SYS1.PARMLIB(BPXPRM00) @@ -246,6 +240,14 @@ Examples LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 + - name: Update a script with commands containing quotes. + zos_blockinfile: + src: "/u/scripts/script.sh" + insertafter: "EOF" + block: | + cat "//'{{ DS_NAME }}'" + cat "//'{{ DS_NAME_2 }}'" + - name: Set facts for the following two tasks. 
set_fact: HLQ: 'ANSIBLE' diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 65f1cc75b..046b8a2f5 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -47,6 +47,9 @@ state If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. @@ -268,6 +271,19 @@ tmp_hlq | **type**: str +force + Specifies that the data set can be shared with others during a member delete operation which results in the data set you are updating to be simultaneously updated by others. + + This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. + + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + + The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + + | **required**: False + | **type**: bool + + batch Batch can be used to perform operations on multiple data sets in a single module call. @@ -296,6 +312,9 @@ batch If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. 
+ + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. @@ -508,6 +527,19 @@ batch | **type**: bool + force + Specifies that the data set can be shared with others during a member delete operation which results in the data set you are updating to be simultaneously updated by others. + + This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. + + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + + The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + + | **required**: False + | **type**: bool + + @@ -599,6 +631,13 @@ Examples state: absent type: MEMBER + - name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR + zos_data_set: + name: someds.name.here(mydata) + state: absent + type: MEMBER + force: yes + - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index d33ca6744..d34098617 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -56,7 +56,7 @@ owner job_id The job id that has been assigned to the job. - A job id begins must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. + A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. 
diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index bc56cf7b5..89ebcc805 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -185,6 +185,17 @@ encoding | **default**: IBM-1047 +force + Specifies that the data set can be shared with others during an update which results in the data set you are updating to be simultaneously updated by others. + + This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. + + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. + + | **required**: False + | **type**: bool + + Examples @@ -226,6 +237,14 @@ Examples line: '\1APPUser\3' backrefs: yes + - name: Add a line to a member while a task is in execution + zos_lineinfile: + src: SOME.PARTITIONED.DATA.SET(DATA) + insertafter: EOF + line: 'Should be a working test now' + force: True + + diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index da86cf18d..d11cc8a98 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -31,10 +31,21 @@ commands Accepts a single string or list of strings as input. + If a list of strings is provided, processing will stop at the first failure, based on rc. + | **required**: True | **type**: raw +max_rc + Specifies the maximum return code allowed for a TSO command. + + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. 
+ + | **required**: False + | **type**: int + + Examples @@ -54,6 +65,12 @@ Examples commands: - LU TESTUSER + - name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) + zos_tso_command: + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 + @@ -87,6 +104,14 @@ output | **returned**: always | **type**: int + max_rc + Specifies the maximum return code allowed for a TSO command. + + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + + | **returned**: always + | **type**: int + content The response resulting from the execution of the TSO command. diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst new file mode 100644 index 000000000..195435924 --- /dev/null +++ b/docs/source/modules/zos_volume_init.rst @@ -0,0 +1,257 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_volume_init.py + +.. _zos_volume_init_module: + + +zos_volume_init -- Initialize volumes or minidisks. +=================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Initialize a volume or minidisk on z/OS. +- *zos_volume_init* will create the volume label and entry into the volume table of contents (VTOC). +- Volumes are used for storing data and executable programs. +- A minidisk is a portion of a disk that is linked to your virtual machine. +- A VTOC lists the data sets that reside on a volume, their location, size, and other attributes. +- *zos_volume_init* uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class `STGADMIN.ICK.INIT`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. 
+- ICKDSF is an Authorized Program Facility (APF) program on z/OS, *zos_volume_init* will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. +- Note that defaults set on target z/OS systems may override ICKDSF parameters. +- It is recommended that data on the volume is backed up as the *zos_volume_init* module will not perform any backups. You can use the `zos_backup_restore <./zos_backup_restore.html>`_ module to backup a volume. + + + + + +Parameters +---------- + + +address + *address* is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. + + *address* can be the number assigned to the device (device number) when it is installed or the virtual address. + + | **required**: True + | **type**: str + + +verify_volid + Verify that the volume serial matches what is on the existing volume or minidisk. + + *verify_volid* must be 1 to 6 alphanumeric characters or ``*NONE*``. + + To verify that a volume serial number does not exist, use *verify_volid=*NONE**. + + If *verify_volid* is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. + + If *verify_volid=*NONE** is specified and a volume serial is found on the volume or minidisk, initialization does not complete. + + Note, this option is **not** a boolean, leave it blank to skip the verification. + + | **required**: False + | **type**: str + + +verify_offline + Verify that the device is not online to any other systems; if the device is found to be online to another system, initialization does not complete. + + | **required**: False + | **type**: bool + | **default**: True + + +volid + The volume serial number used to initialize a volume or minidisk. + + Expects 1-6 alphanumeric, national ($,#,@) or special characters. + + A *volid* with less than 6 characters will be padded with spaces. + + A *volid* can also be referred to as volser or volume serial number.
+ + When *volid* is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. + + | **required**: False + | **type**: str + + +vtoc_size + The number of tracks to initialize the volume table of contents (VTOC) with. + + The VTOC will be placed in cylinder 0 head 1. + + If no tracks are specified it will default to the number of tracks in a cylinder minus 1. Tracks in a cylinder vary based on direct-access storage device (DASD) models, for 3390 a cylinder is 15 tracks. + + | **required**: False + | **type**: int + + +index + Create a volume table of contents (VTOC) index. + + The VTOC index enhances the performance of VTOC access. + + When set to *false*, no index will be created. + + | **required**: False + | **type**: bool + | **default**: True + + +sms_managed + Specifies that the volume be managed by Storage Management System (SMS). + + If *sms_managed* is *true* then *index* must also be *true*. + + | **required**: False + | **type**: bool + | **default**: True + + +verify_volume_empty + Verify that no data sets other than the volume table of contents (VTOC) index or the VSAM Volume Data Set(VVDS) exist on the target volume. + + | **required**: False + | **type**: bool + | **default**: True + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup datasets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + + | **required**: False + | **type**: str + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Initialize target volume with all default options. Target volume address is '1234', set volume name to 'DEMO01'. + Target volume is checked to ensure it is offline and contains no data sets. Volume is SMS managed, has an index + and VTOC size defined by the system. 
+ zos_volume_init: + address: "1234" + volid: "DEMO01" + + - name: Initialize target volume with all default options and additionally check the existing volid + matches the given value 'DEMO02' before re-initializing the volume and renaming it to 'DEMO01'. + zos_volume_init: + address: "1234" + volid: "DEMO01" + verify_volid: "DEMO02" + + - name: Initialize non-SMS managed target volume with all the default options. + zos_volume_init: + address: "1234" + volid: "DEMO01" + sms_managed: no + + - name: Initialize non-SMS managed target volume with all the default options and + override the default high level qualifier (HLQ). + zos_volume_init: + address: 1234 + volid: DEMO01 + sms_managed: no + tmp_hlq: TESTUSR + + - name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as + the existing volume serial is 'ine8d8' and there are no pre-existing data sets on the target. The check to see + if volume is online before initialization is skipped. + zos_volume_init: + address: e8d8 + vtoc_size: 30 + index: yes + sms_managed: yes + volid: ine8d8 + verify_volid: ine8d8 + verify_volume_empty: yes + verify_offline: no + + - name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' + using Ansible loops. + zos_volume_init: + address: "090{{ item }}" + volid: "DEMO0{{ item }}" + loop: "{{ range(1, 4, 1) }}" + + + + + + +See Also +-------- + +.. seealso:: + + - :ref:`zos_backup_restore_module` + + + + +Return Values +------------- + + +msg + Failure message returned by module. + + | **returned**: failure + | **type**: str + | **sample**: 'Index' cannot be False for SMS managed volumes. + +rc + Return code from ICKDSF init command. + + | **returned**: when ICKDSF program is run. + | **type**: dict + +content + Raw output from ICKDSF. + + | **returned**: when ICKDSF program is run. + | **type**: list + | **elements**: str + | **sample**: + + ..
code-block:: json + + [ + "1ICKDSF - MVS/ESA DEVICE SUPPORT FACILITIES 17.0 TIME: 18:32:22 01/17/23 PAGE 1", + "0 ", + "0 INIT UNIT(0903) NOVERIFY NOVERIFYOFFLINE VOLID(KET678) -", + "0 NODS NOINDEX", + "-ICK00700I DEVICE INFORMATION FOR 0903 IS CURRENTLY AS FOLLOWS:", + "- PHYSICAL DEVICE = 3390", + "- STORAGE CONTROLLER = 2107", + "- STORAGE CONTROL DESCRIPTOR = E8", + "- DEVICE DESCRIPTOR = 0C", + "- ADDITIONAL DEVICE INFORMATION = 4A00003C", + "- TRKS/CYL = 15, # PRIMARY CYLS = 100", + "0ICK04000I DEVICE IS IN SIMPLEX STATE", + "0ICK00703I DEVICE IS OPERATED AS A MINIDISK", + " ICK00091I 0903 NED=002107.900.IBM.75.0000000BBA01", + "-ICK03091I EXISTING VOLUME SERIAL READ = KET987", + "-ICK03096I EXISTING VTOC IS LOCATED AT CCHH=X\u00270000 0001\u0027 AND IS 14 TRACKS.", + "0ICK01314I VTOC IS LOCATED AT CCHH=X\u00270000 0001\u0027 AND IS 14 TRACKS.", + "-ICK00001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 0", + "0 18:32:22 01/17/23", + "0 ", + "-ICK00002I ICKDSF PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 0" + ] + diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index ab1e07e49..d897feef4 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,17 +1,63 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021 . +.. © Copyright IBM Corporation 2020, 2021, 2023 . .. ........................................................................... ======== Releases ======== -Version 1.5.0 +Version 1.6.0-beta.1 ==================== New Modules ----------- +- ``zos_volume_init`` - Can initialize volumes or minidisks on target z/OS systems which includes creating a volume label and an entry into the volume table of contents (VTOC). + +Minor Changes +------------- + +- ``zos_blockinfile`` - Adds an enhancement to allow double quotes within a block. 
+- ``zos_data_set`` - Adds a new option named *force* to enable deletion of a data member in a PDSE that is simultaneously in use by others. +- ``zos_job_query`` - Enables embedded positional wild card placement throughout *job_name* and *job_id* parameters. +- ``zos_lineinfile`` - Adds a new option named *force* to enable modification of a data member in a data set that is simultaneously in use by others. +- ``zos_tso_command`` - Adds a new option named *max_rc* to enable non-zero return codes lower than the specified maximum return as succeeded. + +Bugfixes +-------- + +- ``zos_copy`` + - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. + - Fixes a bug where the module would change the mode for a directory when copying in the contents of another directory. + - Fixes a bug where the incorrect encoding would be used during normalization, particularly when processing newlines in files. +- ``zos_encode`` - Fixes a bug where converted files were not tagged with the new code set afterwards. +- ``zos_find`` - Fixes a bug where the module would stop searching and exit after the first value in a list was not found. +- ``zos_lineinfile`` + - Removes use of Python f-string to ensure support for Python 2.7 on the controller. + - Fixes a bug where an incorrect error message would be raised when a USS source was not found. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. + + +Version 1.5.0 +============= + +New Modules +----------- + - ``zos_gather_facts`` - can retrieve variables from target z/OS systems that are then available to playbooks through the ansible_facts dictionary and managed using filters.
Major Changes diff --git a/galaxy.yml b/galaxy.yml index 8aaf403db..cca9297d3 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.5.0 +version: 1.6.0-beta.1 # Collection README file readme: README.md @@ -17,7 +17,7 @@ authors: - Rich Parker <richp@ibm.com> - Ketan Kelkar <ketan.kelkar@ibm.com> - Ivan Moreno <ivan.moreno.soto@ibm.com> - - Oscar Fernando Flores Garcia<fernando.flores@ibm.com> + - Oscar Fernando Flores Garcia <fernando.flores@ibm.com> - Jenny Huang <jennyhuang@ibm.com> - Marcel Guitierrez <andre.marcel.gutierrez@ibm.com> diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index c2aab577a..484ad69fd 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.5.0" +version: "1.6.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index dd2172fc8..67bd83981 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2021, 2022 +# Copyright (c) IBM Corporation 2019-2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 90b9d1013..5be6d1944 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py index 03854a80f..6dbc9f97e 100644 --- a/plugins/modules/zos_volume_init.py +++ b/plugins/modules/zos_volume_init.py @@ -63,12 +63,12 @@ verify_volid: description: - Verify that the volume serial matches what is on the existing volume or minidisk. - - I(verify_volid) must be 1 to 6 alphanumeric characters or "*NONE*". + - I(verify_volid) must be 1 to 6 alphanumeric characters or C(*NONE*). - To verify that a volume serial number does not exist, use - I(verify_volid="*NONE*"). + I(verify_volid=*NONE*). - If I(verify_volid) is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. - - If I(verify_volid="*NONE*") is specified and a volume serial is found on + - If I(verify_volid=*NONE*) is specified and a volume serial is found on the volume or minidisk, initialization does not complete. - Note, this option is B(not) a boolean, leave it blank to skip the verification. 
required: false diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index c5f660a6c..5a575d87c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1916,8 +1916,6 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): ) dest_name = "{0}({1})".format(dest, member) src_name = "{0}({1})".format(src, member) - - # both src and dest need to be a loadlib rc = link_loadlib_from_cobol(hosts, dest_name, cobol_pds) assert rc == 0 diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 37bdcb682..118fdcc18 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 04dfb7368..fb1a47179 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 0231cc874..7128f12a7 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 7b77c155d..85f4184af 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at From f4dc3f725a360b5bc59723d15e3461565c46fad3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 4 May 2023 17:51:00 -0600 Subject: [PATCH 105/413] Bugfix/619/mode set for files applied test case (#757) * Add test case for copy dest file * Add comments * Add test for folders * Adjust spaces * Changes for ensure consistency for all tests * Changes of name and clean creations --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> --- .../functional/modules/test_zos_copy_func.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 5a575d87c..97ec099dc 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1023,6 +1023,60 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): assert "does not exist" in result.get("msg") +@pytest.mark.uss +@pytest.mark.parametrize("src", [ + dict(src="/etc/profile", is_remote=False), + dict(src="/etc/profile", is_remote=True),]) +def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, src): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + try: + hosts.all.file(path=dest_path, state="directory", mode="750") + permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + hosts.all.zos_copy(content=src["src"], dest=dest_path) + permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + + for before in permissions_before.contacted.values(): + permissions_be_copy = before.get("stdout") + + for after in permissions.contacted.values(): + permissions_af_copy = after.get("stdout") + + permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] + permissions_af_copy = 
permissions_af_copy.splitlines()[1].split()[0] + + assert permissions_be_copy == permissions_af_copy + finally: + hosts.all.file(path=dest_path, state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("src", [ + dict(src="/etc/", is_remote=False), + dict(src="/etc/", is_remote=True),]) +def test_ensure_copy_directory_does_not_change_permission_on_dest(ansible_zos_module, src): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + try: + hosts.all.file(path=dest_path, state="directory", mode="750") + permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + hosts.all.zos_copy(content=src["src"], dest=dest_path) + permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + + for before in permissions_before.contacted.values(): + permissions_be_copy = before.get("stdout") + + for after in permissions.contacted.values(): + permissions_af_copy = after.get("stdout") + + permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] + permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] + + assert permissions_be_copy == permissions_af_copy + finally: + hosts.all.file(path=dest_path, state="absent") + + @pytest.mark.uss @pytest.mark.seq def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): @@ -2727,3 +2781,4 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( assert v_cp.get("rc") == 0 finally: hosts.all.zos_data_set(name=dest, state="absent") + \ No newline at end of file From d54ac79b1461786fb0c3811d0a181952f332c586 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 4 May 2023 21:55:30 -0600 Subject: [PATCH 106/413] Bugfix/381/failed when the job name was null or not found (#747) * Add the verbose for failed when job name was null or not found * Adjust message for what we can get * Whitespaces move * Add code from dev * Ecode utility as is in dev * Year 
for copyright * Case for having both the jod_id and job_name * Ecode utils functions not in my branch * Add final line ecode * Add fragment * Delete encode function two times, adjust job message and change the fragment * Change variable name for one more descriptive * Restore encode and change one word * Encode * bugfixes * Set up as dev * Better fragment --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...7-failed_when_the_job_name_was_null_or_not_found.yaml | 5 +++++ plugins/module_utils/job.py | 9 +++++++-- tests/functional/modules/test_zos_job_output_func.py | 5 +++-- 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml diff --git a/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml b/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml new file mode 100644 index 000000000..0830b8fe3 --- /dev/null +++ b/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml @@ -0,0 +1,5 @@ +bugfixes: +- zos_job_output - Error message did not specify the job not found. + Fix now specifies the job_id or job_name being searched to ensure more + information is given back to the user. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/747) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 9af6260f4..94909aba4 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -86,7 +86,12 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, def _job_not_found(job_id, owner, job_name, dd_name): # Note that the text in the msg_txt is used in test cases thus sensitive to change jobs = [] - + if job_id != '*' and job_name != '*': + job_not_found_msg = "{0} with the job_id {1}".format(job_name.upper(), job_id.upper()) + elif job_id != '*': + job_not_found_msg = "with the job_id {0}".format(job_id.upper()) + else: + job_not_found_msg = "with the name {0}".format(job_name.upper()) job = {} job["job_id"] = job_id @@ -99,7 +104,7 @@ def _job_not_found(job_id, owner, job_name, dd_name): job["ret_code"]["msg"] = None job["ret_code"]["code"] = None job["ret_code"]["msg_code"] = None - job["ret_code"]["msg_txt"] = "The job could not be found." + job["ret_code"]["msg_txt"] = "The job {0} could not be found.".format(job_not_found_msg) job["class"] = "" job["content_type"] = "" diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 8cd55dd0f..4b3990ab5 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -31,7 +31,8 @@ """ TEMP_PATH = "/tmp/jcl" -JOB_NOT_FOUND_MSG_TXT="The job could not be found." +JOB_NOT_FOUND_MSG_TXT="The job with the name * could not be found." +JOB_NOT_FOUND_MSG_TXT_ID="The job with the job_id INVALID could not be found." 
def test_zos_job_output_no_job_id(ansible_zos_module): hosts = ansible_zos_module @@ -46,7 +47,7 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module): results = hosts.all.zos_job_output(job_id="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT + assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT_ID def test_zos_job_output_no_job_name(ansible_zos_module): From 9d886cb9ffa6f13cde3c1598256928d0b49de858 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 5 May 2023 16:43:09 -0600 Subject: [PATCH 107/413] Bugfix/660/zos operator reported failure caused by unrelated error response messages (#762) * Add options * Add transparency on the response and test cases * Solve spaces * Add validation to append * Fragment Added * Adjust fail_json on non_zero response * Identation mistakes solved * Solve last idenation problem --- ...re-caused-by-unrelated-error-response.yaml | 4 ++ plugins/modules/zos_operator.py | 57 +++++++------------ .../modules/test_zos_operator_func.py | 14 ++++- 3 files changed, 36 insertions(+), 39 deletions(-) create mode 100644 changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml diff --git a/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml b/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml new file mode 100644 index 000000000..d7aae1c14 --- /dev/null +++ b/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml @@ -0,0 +1,4 @@ +bugfixes: + - zos_operator - Reported a failure caused by unrelated error response. + Fix now gives a transparent response of the operator to avoid false negatives. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index a0f66c302..5bd04ba50 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -217,26 +217,18 @@ def run_module(): # short_str is local, and just to check for problem response values. # ssctr is a limit variable so we don't pull more than 5 lines of each. result["content"] = [] - short_str = [] - ssctr = 0 - tstr = rc_message.get("stdout") - if tstr is not None: - for s in tstr.split("\n"): - if s: - result["content"].append(s) - if ssctr < 5: - short_str.append(s) - ssctr += 1 - ssctr = 0 - tstr = rc_message.get("stderr") - if tstr is not None: - for s in tstr.split("\n"): - if s: - result["content"].append(s) - if ssctr < 5: - short_str.append(s) - ssctr += 1 - + stdout = rc_message.get("stdout") + if stdout is not None: + for out in stdout.split("\n"): + if out: + result["content"].append(out) + stderr = rc_message.get("stderr") + error = [] + if stderr is not None: + for err in stderr.split("\n"): + if err: + error.append(err) + result["content"].append(err) # call is returned from run_operator_command, specifying what was run. # result["cmd"] = new_params.get("cmd") result["cmd"] = rc_message.get("call") @@ -247,27 +239,18 @@ def run_module(): # but it could still be a bad/invalid command. # As long as there are more than 2 lines, it's worth looking through. 
if int(result["rc"]) == 0: - if len(short_str) > 2: + if len(result["content"]) > 2: result["changed"] = True - for linetocheck in short_str: - if "invalid" in linetocheck.lower(): - result["exception"] = "Invalid detected: " + linetocheck - result["changed"] = False - module.fail_json(msg=result["exception"], **result) - elif "error" in linetocheck.lower(): - result["exception"] = "Error detected: " + linetocheck - result["changed"] = False - module.fail_json(msg=result["exception"], **result) - elif "unidentifiable" in linetocheck.lower(): - result["exception"] = "Unidentifiable detected: " + linetocheck - result["changed"] = False - module.fail_json(msg=result["exception"], **result) else: module.fail_json(msg="Expected response to be more than 2 lines.", **result) else: - module.fail_json( - msg="Non-zero response received: " + str(result["rc"]), **result - ) + module.fail_json(msg=("A non-zero return code was received : {0}. Review the response for more details.").format(result["rc"]), + cmd=result["cmd"], + elapsed_time=result["elapsed"], + wait_time_s=result["wait_time_s"], + stderr=str(error) if error is not None else result["content"], + stderr_lines=str(error).splitlines() if error is not None else result["content"], + changed=result["changed"],) except Error as e: module.fail_json(msg=repr(e), **result) except Exception as e: diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 146896e74..dbdb4f065 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -49,8 +49,18 @@ def test_zos_operator_invalid_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="invalid,command", verbose=False) for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("exception") is not None + assert result.get("changed") is True + + +def 
test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_operator(cmd="DUMP COMM=('ERROR DUMP')", verbose=False) + for result in results.contacted.values(): + assert result.get("changed") is True + transparency = False + if any('DUMP COMMAND' in str for str in result.get("content")): + transparency = True + assert transparency def test_zos_operator_positive_path(ansible_zos_module): From 3095388f87d22c340be5386f17fb71aa7d8ed614 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 25 May 2023 17:08:17 -0700 Subject: [PATCH 108/413] Replace prior tooling (makefile) that aidded the development workflow with a new 'ac' command. (#766) * Make file mount script helper Signed-off-by: ddimatos <dimatos@gmail.com> * Comments to mount script Signed-off-by: ddimatos <dimatos@gmail.com> * Staged updated scripts for makefile usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update mount scripts for use with makefile Signed-off-by: ddimatos <dimatos@gmail.com> * updates to correct mounts and add function to mounts-datasets Signed-off-by: ddimatos <dimatos@gmail.com> * adding completed new ac command files for development Signed-off-by: ddimatos <dimatos@gmail.com> * update ignore to more specific with venv Signed-off-by: ddimatos <dimatos@gmail.com> * Correcting ignore to allow for venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * moved logic that checks for info.env to venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Adding changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a path issue when calling venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes issue not being able to run all tests, fixes issue with content being written to collections folder Signed-off-by: ddimatos <dimatos@gmail.com> * Support zSH and update scp to fall back to legacy scp protocal Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: 
ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Fix incorrect message and remove the cd's before and after ac-test Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .gitignore | 145 +++- Makefile | 744 ---------------- ac | 797 ++++++++++++++++++ .../766-ac-command-replace-makefile.yml | 4 + galaxy.yml | 33 +- make.env.encrypt | 287 ------- scripts/hosts.env | 42 + scripts/info.env.axx | 15 + scripts/mount-shr.sh | 92 -- scripts/mounts.env | 75 ++ scripts/mounts.sh | 700 +++++++++++++-- scripts/profile-shr | 230 ----- scripts/profile.sh | 73 +- scripts/requirements-2.11.env | 35 + scripts/requirements-2.12.env | 32 + scripts/requirements-2.13.env | 32 + scripts/requirements-2.14.env | 32 + scripts/requirements-2.9.env | 35 + scripts/requirements-common.env | 133 +++ scripts/requirements-latest.env | 31 + scripts/venv.sh | 585 +++++++++++++ 21 files changed, 2643 insertions(+), 1509 deletions(-) delete mode 100644 Makefile create mode 100755 ac create mode 100644 changelogs/fragments/766-ac-command-replace-makefile.yml delete mode 100644 make.env.encrypt create mode 100644 scripts/hosts.env create mode 100755 scripts/info.env.axx delete mode 100755 scripts/mount-shr.sh create mode 100644 scripts/mounts.env mode change 100644 => 100755 scripts/mounts.sh delete mode 100755 scripts/profile-shr create mode 100644 scripts/requirements-2.11.env create mode 100644 scripts/requirements-2.12.env create mode 100644 scripts/requirements-2.13.env create mode 100644 scripts/requirements-2.14.env create mode 100644 scripts/requirements-2.9.env create mode 100644 scripts/requirements-common.env create mode 100644 scripts/requirements-latest.env create mode 100755 scripts/venv.sh diff --git a/.gitignore b/.gitignore index 8a66463d2..9c4301951 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ *.rar *.tar *.zip +*.tar.gz ############################# # Output 
Folders # @@ -56,12 +57,16 @@ Thumbs.db *.bak *.swp -# Byte-compiled / optimized / DLL files +######################################### +# Byte-compiled / optimized / DLL files # +######################################### __pycache__/ *.py[cod] *$py.class -# Distribution / packaging +############################# +# Distribution / packaging # +############################# .Python build/ develop-eggs/ @@ -82,17 +87,24 @@ share/python-wheels/ *.egg MANIFEST +################################################################################ # PyInstaller # Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. +# before PyInstaller builds the exe, so as to inject date/other infos +# into it. +################################################################################ *.manifest *.spec -# Installer logs +#################### +# Installer logs # +#################### pip-log.txt pip-delete-this-directory.txt -# Unit test / coverage reports +################################ +# Unit test / coverage reports # +################################ htmlcov/ .tox/ .nox/ @@ -106,86 +118,115 @@ coverage.xml .hypothesis/ .pytest_cache/ -# Translations +################## +# Translations # +################## *.mo *.pot -# Django stuff: +################### +# Django # +################### *.log local_settings.py db.sqlite3 db.sqlite3-journal -# Flask stuff: +################### +# Flask # +################### instance/ .webassets-cache -# Scrapy stuff: +################### +# Scrapy # +################### .scrapy -# Sphinx documentation +########################## +# Sphinx documentation # +########################## docs/_build/ -# PyBuilder +########################## +# PyBuilder # +########################## target/ -# Jupyter Notebook +########################## +# Jupyter Notebook # +########################## .ipynb_checkpoints -# IPython +########################## +# IPython # 
+########################## profile_default/ ipython_config.py -# pyenv +########################## +# pyenv # +########################## .python-version +################################################################################ # pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock +# in version control. However, in case of collaboration, if having +# platform-specific dependencies or dependencies having no cross-platform +# support, pipenv may install dependencies that don't work, or not # install all needed dependencies. +################################################################################ #Pipfile.lock -# PEP 582; used by e.g. github.com/David-OConnor/pyflow +#################################################################### +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow # +#################################################################### __pypackages__/ -# Celery stuff +############# +# Celery # +############# celerybeat-schedule celerybeat.pid -# SageMath parsed files +########################## +# SageMath parsed files # +########################## *.sage.py -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings +############################ +# Spyder project settings # +############################ .spyderproject .spyproject -# Rope project settings +############################ +# Rope project settings # +############################ .ropeproject -# mkdocs documentation +############################ +# mkdocs documentation # +############################ /site -# mypy +############ +# mypy # +############ .mypy_cache/ .dmypy.json dmypy.json -# Pyre type checker +##################### +# Pyre type checker # +##################### .pyre/ - *.retry -# Visual Studio Code workspace configuration files +##################################################### +# Visual Studio Code workspace configuration files # +##################################################### .vscode/* .vscode/ !.vscode/tasks.json @@ -194,11 +235,31 @@ dmypy.json *.code-workspace .vscode/settings.json -# Development files -hosts +########################## +# Environments # +########################## +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +################################### +# Ansible z/OS Core Development # +################################### .ansible-test/ -.keep +.cache +.DS_Store +.python-version +.pytest_cache +info.env shell_exploits.txt -test_config.yml -make.env.encrypt -make.env \ No newline at end of file + +################################################################################ +# Debugging .ignore, if you want to know why a particular file is being ignored +# and by which rule, try `git check-ignore -v <file>` +# e.g. 
`git check-ignore -v venv/` +# .gitignore:244:venv/ venv/ +################################################################################ \ No newline at end of file diff --git a/Makefile b/Makefile deleted file mode 100644 index 4f1f6f58e..000000000 --- a/Makefile +++ /dev/null @@ -1,744 +0,0 @@ -# ============================================================================== -# Copyright (c) IBM Corporation 2022 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Makefile is used to assist with development tasks such as running tests cases -# or setting up a python virtual environment. -# This makefile relies on shell script `make.env` which should not be renamed. -# The contents of the `make.env` are encrypted to adhere to coporate operational -# requiements. 
If you need to edit the `make.env` be sure to use this makefile -# to access the script: -# (1) make decrypt <enter password at prompt> -# (2) vi/edit script the contents as needed -# (3) make encrypt <enter same password used to decrypt> -# While of some of the targets work without a venv, it's higly recommended you -# instruct make to create you a venv where it will perform operations: -# (1) make vsetup -# Optionally you can override the makefile's env var VENV to instruct it to -# create a `venv` based on your requiements.txt, you can do this by: -# (1) export VENV=venv-2.11 -# (2) make vsetup req=requirements-ac-2.11.12.txt -# Now all make targets will use the venv you assigned to the exported variable -# and also a directory `venv-2.11` will be created and populated with files used -# by make. You may consider pyvenv so that you can change your python versions -# to meet the needs of the various ansible-core versions. -# ============================================================================== - -# ============================================================================== -# GLOBAL VARS -# ============================================================================== - -CURR_DIR := $(shell pwd) -WHO := $(shell whoami) -HOST_PYTHON = python3 -# VENV = venv -# VENV := $(shell echo $$VENV) -VENV := $(shell echo "$${VENV:-venv}") -VENV_BIN=$(VENV)/bin - -ZOS_PYTHON_DEFAULT=3.8 -ZOAU_DEFAULT=1.1.1 -# Test if docker is running -DOCKER_INFO := $(shell docker info> /dev/null 2>&1;echo $$?) 
- -# Unit test to skip -SKIP = tests/functional/modules/test_module_security.py -divider="====================================================================" - -.PHONY: help Makefile -# ============================================================================== -# Makefile -# ============================================================================== - -# ============================================================================== -# Run a bandit security scan on the plugin directory -# ============================================================================== -## Run a bandit security scan on the plugins directory, set the severity level. -## Options: -## level - choose from 'l', 'll', 'lll' -## - l all low, medium, high severity -## - ll all medium, high severity -## - lll all hight severity -## Example: -## $ make bandit sev=ll -## $ make bandit sev=l -bandit: - ifdef sev - @echo $(divider); - @echo "Running Bandit scan with sev=${sev}"; - @echo $(divider); - @. $(VENV_BIN)/activate && bandit -r plugins/* -${sev} - else - @echo "No bandit sev (severity) has been set." - endif - - -# ============================================================================== -# Build the current collection based on the git branch local to the computer. -# Currently, venv's only manage python packages, colleciton installation is managed -# with paths, if we wwanted to install it in the venv to not dirty the host, we -# could try building a similar command to pythons venv: -# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections -# ============================================================================== -## Build and installa collection of the current branch checked out -## Example: -## $ make build -build: - @echo $(divider) - @echo "Building Ansible collection based on local branch and installing." - @echo $(divider) - - @. 
$(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ - ansible-galaxy collection build && \ - ansible-galaxy collection install -f ibm-ibm_zos_core-* - - -## Build the changelog, this should be a release activity otherwise the generated -## files should not be checked in. -## Example: -## $ make buildChglog -buildChglog: - @. $(VENV_BIN)/activate && antsibull-changelog release - - -## Update the documentation for the collection after module doc changes have been -## made. This simply calls the make file in the docs directory, see the make file -## there for additional options. -## Example: -## $ make buildDoc -buildDoc: - @. $(VENV_BIN)/activate && make -C docs clean - @. $(VENV_BIN)/activate && make -C docs module-doc - @. $(VENV_BIN)/activate && make -C docs html - @. $(VENV_BIN)/activate && make -C docs view-html - - -# ============================================================================== -# Cleanup and teardown based on user selection -# ============================================================================== -## Cleanup and teardown the environment based on the level selected. -## Options: -## level - choose from 'min', 'all' -## - 'all' will remove the venv, restore any temporarily located files -## and ensure config is encrypted -## - 'min' will restore any temporarily located files -## and ensure config is encrypted -## Example: -## $ make clean level=all -## $ make clean level=min -clean: - ifdef level - ifeq ($(level),all) - @echo $(divider) - @echo "Complete teardown selected." - @echo $(divider) - - @echo $(divider) - @echo "Deleting python virtual environment 'venv'." 
- @echo $(divider) - @rm -rf $(VENV) - endif - - ifeq ($(level),min) - @echo $(divider); - @echo "Minimum teardown selected."; - @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; - @echo $(divider); - @rm -rf $(VENV)/make.env - @rm -rf $(VENV)/mount-shr.sh - @rm -rf $(VENV)/profile-shr - endif - - @if test -e tests/functional/modules/test_module_security.txt; then \ - echo $(divider); \ - echo "Restoring 'test_module_security.py', previously removed to avoid execution."; \ - echo $(divider); \ - mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ - fi - - # Unsure really need or even want to do this as part of cleanup - # @if test -e make.env; then \ - # echo $(divider); \ - # echo "Found uncrypted files, encrypting them."; \ - # echo $(divider); \ - # make encrypt; \ - # fi - else - @echo $(divider) - @echo "Default teardown, deleting $(VENV)" - @echo $(divider) - @rm -rf $(VENV) - endif - - -## Cleanup and remove geneated doc for the collection if its not going to be -## checked in -## Example: -## $ make cleanDoc -cleanDoc: - @. $(VENV_BIN)/activate && make -C docs clean - - -## Copy your ssh key to a `host` or the default which is your username. If you are -## copying a key to a production server, a second key will be copied used by the -## jenkins node, this minimizes the number of times you must copy a key. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` -## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system -## Example: -## $ make copyKey host=ec33012a -## $ make copyKey -copyKey: - @echo $(divider) - @echo "Copying SSH keys to the managed node authorized_keys." 
- @echo $(divider) - - ifdef host - @${VENV}/./make.env --cert ${host} - else - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --cert ${username} - endif - - -## Decrypt all scripts used with this Makefile using the user specified password -## Files include: ["mount-shr.sh", "profile-shr", "make.env"] -## If no password is provided, you will be prompted to enter a password for each -## file being decrypted. -## Example: -## $ make encrypt password= -## $ make decrypt -decrypt: - @# -------------------------------------------------------------------------- - @# Check configuration files exit - @# -------------------------------------------------------------------------- - #@if test ! -e scripts/mount-shr.sh.encrypt; then \ - # echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ - # exit 1; \ - #fi - - #@if test ! -e scripts/profile-shr.encrypt; then \ - # echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ - # exit 1; \ - #fi - - @if test ! 
-e make.env.encrypt; then \ - echo "File 'make.env.encrypt' not found in $(CURR_DIR)"; \ - exit 1; \ - fi - - @# ------------------------------------------------------------------------- - @# Decrypt configuration files - @# ------------------------------------------------------------------------- - ifdef password - #@echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin - #@chmod 700 scripts/mount-shr.sh - - #@echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin - #@chmod 700 scripts/profile-shr - - @echo "${password}" | openssl bf -d -a -in make.env.encrypt -out make.env -pass stdin - @chmod 700 make.env - else - #@openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh - #@chmod 700 scripts/mount-shr.sh - - #@openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr - #@chmod 700 scripts/profile-shr - - @openssl bf -d -a -in make.env.encrypt -out make.env - @chmod 700 make.env - endif - - -## Encrypt the configuration files with a `.encrypt` suffix for files -## [make.env, mount-shr.sh, profile-shr] with user specified password. -## If no password is provided, you will be prompted to enter a password for each -## file being encrypted. -## Example: -## $ make encrypt password= -## $ make encrypt -## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. -encrypt: - @# -------------------------------------------------------------------------- - @# Check to see if there is an unencrypted file(s) to encrypt, you would not - @# want to delete the encrypted version if the unecrypted is not present as - @# there would be no recovery process. Then check to see if there an - @# encrypted version of the file, if so delete it. 
- @# -------------------------------------------------------------------------- - @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ - echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ - rm -rf make.env.encrypt; \ - fi - - # @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ - # echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ - # rm -rf scripts/mount-shr.sh.encrypt; \ - # fi - - # @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ - # echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ - # rm -rf scripts/profile-shr.encrypt; \ - # fi - - @# -------------------------------------------------------------------------- - @# Encrypt the files since we have verified the uncrypted versions exist - @# Note: we should move make.env to scripts as well - @# -------------------------------------------------------------------------- - - ifdef password - - #ifneq ("$(wildcard scripts/mount-shr.sh)","") - # @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin - # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - # @rm -f scripts/mount-shr.sh - #endif - - #ifneq ("$(wildcard scripts/profile-shr)","") - # @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin - # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - # @rm -f scripts/profile-shr - #endif - - ifneq ("$(wildcard make.env)","") - @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env - endif - - else - #ifneq ("$(wildcard scripts/mount-shr.sh)","") - # @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt - # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - # @rm -f 
scripts/mount-shr.sh - #endif - - #ifneq ("$(wildcard scripts/profile-shr)","") - # @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt - # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - # @rm -f scripts/profile-shr - #endif - - ifneq ("$(wildcard make.env)","") - @openssl bf -a -in make.env -out make.env.encrypt - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env - endif - endif - - -# ============================================================================== -# Self documenting code that when comments are created as expected, the help -# is auto generated. Supports multiline comments when comments are prefixed with -# 2 pound signs and a space, see examples in this makefile. -# ============================================================================== -## Help on how how to use this Makefile, options and examples. -help: - @awk '{ \ - if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \ - helpCommand = substr($$0, index($$0, ":") + 2); \ - if (helpMessage) { \ - printf "\033[36m%-20s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^[a-zA-Z\-\_0-9.]+:/) { \ - helpCommand = substr($$0, 0, index($$0, ":")); \ - if (helpMessage) { \ - printf "\033[36m%-10s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^##/) { \ - if (helpMessage) { \ - helpMessage = helpMessage"\n "substr($$0, 3); \ - } else { \ - helpMessage = substr($$0, 3); \ - } \ - } else { \ - if (helpMessage) { \ - print "\n "helpMessage"\n" \ - } \ - helpMessage = ""; \ - } \ - }' \ - $(MAKEFILE_LIST) - - -# ============================================================================== -# Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) -# ============================================================================== -## Install a collection from galaxy and specify the version. 
-## Options: -## version - any GA and beta versions currently on Galaxy -## Example: -## $ make install 1.4.0-beta.1 -## $ make install -install: - ifdef version - @echo $(divider); - @echo "Installing 'ibm.ibm_zos_core' collection version=${version}."; - @echo $(divider); - @. $(VENV_BIN)/activate && ansible-galaxy collection install -fc ibm.ibm_zos_core:${version} - else - @echo $(divider); - @echo "Installing latest non-beta 'ibm.ibm_zos_core' collection."; - @echo $(divider); - @. $(VENV_BIN)/activate && ansible-galaxy collection install -fc ibm.ibm_zos_core - endif - - -## Copy your ssh key to a `host` or the default which is your username. Then -## copy the super share mount script and profile for the mounts, execute the -## mount script and exit, upon rmote ssh, `profile-shr` will be located -## at `/u/${user} where user is defined in the make.env `host_list`. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` -## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system -## Example: -## $ make mountProfile host=ec33012a -## $ make mountProfile -mountProfile: - ifdef host - @make copyKey host=${host} - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." - @echo $(divider) - @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" - else - @make copyKey - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." 
- @echo $(divider) - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr - endif - - -# ============================================================================== -# Print the configuration used to connect to the managed node for functional tests -# ============================================================================== -## Print the contents of the config file (venv/config.yml) which is used to -## connect to the managed z/OS node to run functional tests on. This will only -## be available if yo have set up a venv using `make vsetup` because a password -## is required to generate the config and is considered sensitive content per -## corporate policy. -## Example: -## $ make printConfig -printConfig: - @if test -e $(VENV)/config.yml; then \ - cat $(VENV)/config.yml; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -# ============================================================================== -# Print the make.env contents -# ============================================================================== -## Print the contents of the venv/make.env, this only works if -## you have set up a venv using `make vsetup` because a password is required to -## decrypt and a decrypted copy will be placed in the venv. -## Example: -## $ make printEnv -printEnv: - @if test -e $(VENV)/make.env; then \ - cat $(VENV)/make.env; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -# ============================================================================== -# Print the make.env contents -# ============================================================================== -## Print the contents of the venv/mount-shr.sh, this only works if -## you have set up a venv using `make vsetup` because a password is required to -## decrypt and a decrypted copy will be placed in the venv. 
-## Example: -## $ make printMount -printMount: - @if test -e $(VENV)/mount-shr.sh; then \ - cat $(VENV)/mount-shr.sh; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -# ============================================================================== -# Print the make.env contents -# ============================================================================== -## Print the contents of the venv/profile-shr, this only works if -## you have set up a venv using `make vsetup` because a password is required to -## decrypt and a decrypted copy will be placed in the venv. -## Example: -## $ make printEnv -printProfile: - @if test -e $(VENV)/profile-shr; then \ - cat $(VENV)/profile-shr; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -## Display the z/OS managed nodes available and configured. This will show which -## systems you can use in the host argument for `make test host<....>` -## Example: -## $ make printTargets -printTargets: - @${VENV}/./make.env --targets - - -# ============================================================================== -# Run the sanity test using docker given python version else default to venv -# ============================================================================== -## Run sanity tests either in the virtual environment (venv) or docker if there is a running docker engine -## Options: -## version - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', no selection will run all available python versions -## Example: -## $ make sanity version=3.8 -## $ make sanity -sanity: - ifeq ($(DOCKER_INFO),0) - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements --docker default && \ - cd $(CURR_DIR); - else - @. 
$(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements --docker default && \ - cd $(CURR_DIR); - endif - else - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements && \ - cd $(CURR_DIR); - else - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements && \ - cd $(CURR_DIR); - endif - endif - - -# ============================================================================== -# Run functional tests: -# ============================================================================== -## Run collection functional tests inside the python virtual environment (venv) -## Options: -## host - z/OS managed node to run test cases, no selection will default to -## a system registerd to your user name, see make.env -## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11 -## no selection defauls to 3.8 -## zoau - Z Open Automation Utilites to use with the collection, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1 -## no selection defaults to 1.1.1 -## name - the absoulte path to a particluar test case to run, no selection -## will default to all test cases running. 
-## debug - enable debug for pytest (-s), any value will result in true enabling -## debug, default is to not define a value so that it evaluates to false -## Example: -## $ make test (runs all tests using default users system and dependencies) -## $ make test name=tests/functional/modules/test_zos_copy_func.py debug=true (run specific test and debug) -## $ make test host=ec33012a python=3.9 zoau=1.1.1 name=tests/functional/modules/test_zos_copy_func.py debug=true -test: - @# -------------------------------------------------------------------------- - @# Expecting the zOS host, python version and zoau version to use with - @# generating a configuration for us with zTest helper. - @# -------------------------------------------------------------------------- - - ifdef host - ifdef python - ifdef zoau - @echo $$(${VENV}/./make.env --config ${host} ${python} ${zoau})>$(VENV)/config.yml - else - @echo "Option 'zoau=<version>' was not set, eg zoau=1.1.1" - @exit 1 - endif - else - @echo "No python version option was set, eg python=3.8" - @exit 1 - endif - else - @# -------------------------------------------------------------------------- - @# When a quick test with no options and defaults are acceptable, a - @# lookup using the users usersname is mapped to a default of known - @# zos targets registered in make.env - @# -------------------------------------------------------------------------- - - $(eval username := $(shell whoami)) - echo $$(${VENV}/./make.env --config ${username} ${ZOS_PYTHON_DEFAULT} ${ZOAU_DEFAULT})>$(VENV)/config.yml - - endif - - @# -------------------------------------------------------------------------- - @# Check configuration was created in venv/config.yml, else error and exit - @# -------------------------------------------------------------------------- - - @if test ! 
-e $(VENV)/config.yml; then \ - echo "No configuration created in $(VENV)/config.yml "; \ - exit 1; \ - fi - - @# -------------------------------------------------------------------------- - @# Check if name='a specific test' and if debug was set, else run all tests - @# -------------------------------------------------------------------------- - - ifdef name - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml - endif - else - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml - endif - endif - - -# ============================================================================== -# Check the version of the ibm_zos_core collection installed -# ============================================================================== -## Get the version of the ibm_zos_core collection installed -## Example: -## $ make version -version: - @echo $(divider) - @echo "Obtaining Ansible collection version installed on this controller." - @echo $(divider) - - @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ - |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; - -# ============================================================================== -# Setup the python virtual environment, the default name is 'venv'. 
You can -# override the default name by exporting the variable VENV: -# (1) export VENV=venv-2.11 -# (2) make vsetup req=requirements-ac-2.11.12.txt -# @test -d $(VENV) || $(HOST_PYTHON) -m venv $(VENV) -# ============================================================================== -## Create a python virtual environment (venv) based on the hosts python3 -## Options: -## req - your requirements.txt else a default one will be used -## Example: -## $ make vsetup -## $ make vsetup req=path/to/requirements.txt -## -## Override the default virtual environment name 'venv' by exporting var VENV -## $ export VENV=venv-2.11 -## $ make vsetup req=requirements-ac-2.11.12.txt -vsetup: - - @# ------------------------------------------------------------------------- - @# Create the virtual environment directory if it does not exist - @# ------------------------------------------------------------------------- - @if test ! -d $(VENV); then \ - echo $(divider); \ - echo "Creating python virtual environment directory $(VENV)."; \ - echo $(divider); \ - $(HOST_PYTHON) -m venv $(VENV); \ - else \ - echo "Virtual environment already exists, no changes made."; \ - fi - - @# ------------------------------------------------------------------------- - @# Check if files exist in venv, if they do we should not decrypt/replace - @# them as they could have edits and risk losing them. - @# ------------------------------------------------------------------------- - - @if test ! -e $(VENV)/make.env && \ - test ! -e $(VENV)/mount-shr.sh && \ - test ! 
-e $(VENV)/profile-shr; then \ - echo $(divider); \ - echo "Decrypting files into $(VENV)."; \ - echo $(divider); \ - make decrypt; \ - mv make.env $(VENV)/; \ - mv scripts/mount-shr.sh $(VENV)/; \ - mv scripts/profile-shr $(VENV)/; \ - else \ - echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ - fi - - ifdef req - @if test -f ${req}; then \ - echo $(divider); \ - echo "Installing user provided python requirements into $(VENV)."; \ - echo $(divider); \ - cp ${req} ${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - fi - else - @if test ! -e $(VENV)/requirements.txt; then \ - echo $(divider); \ - echo "Installing default python requirements into $(VENV)."; \ - echo $(divider); \ - echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - else \ - echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ - fi - endif - - -# ============================================================================== -# You don't need to activate your venv with this Makefile, but should you want -# to, you can with vstart. -# ============================================================================== -## Start the venv if you plan to work in a python virtual environment -## Example: -## $ make vstart -vstart: - @echo $(divider) - @echo "Activating python virtual environment 'venv', use 'vstop' to deactivate." - @echo $(divider) - @. $(VENV_BIN)/activate; exec /bin/sh -i - - -# ============================================================================== -# Deactivate your venv -# ============================================================================== -## Deactivate (stop) the venv -## Example: -## $ make vstop -vstop: - @echo $(divider) - @echo "Deactivate python virtual environment 'venv'." - @echo $(divider) - @. 
deactivate - - -# ============================================================================== -# Unused but maybe can repurpose code snippets -# ============================================================================== -# Build the command, this is not run initially -# CMD_CONFIG := $(shell $(VENV)/./make.env --config ${host} ${python} ${zoau}) -# Define the executible `GEN_CONFIG` and assign it to CONFIG -# GEN_CONFIG = $(eval CONFIG=$(CMD_CONFIG)) - -# ============================================================================== -# Makefile tip: -# ============================================================================== -# If you have formatting issues; try `cat -e -t -v Makefile`. -# ^I represent tabs and $'s represent end of the line. -# -# If you need to debug your makefile command, use `-nd`, eg `make -nd vstop` diff --git a/ac b/ac new file mode 100755 index 000000000..b01fa8bf8 --- /dev/null +++ b/ac @@ -0,0 +1,797 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# Global Vars +# ============================================================================== + +# Note: using the venv.sh script to find the latest venv puts the current 'ac' +# in the scripts directory because venv.sh performs a 'cd $(dirname $0)' and +# then other scripts can't be found in the managed venv corectly. Although this +# is probably a temporary solution, we now take the same code from venv.sh and +# use it here in 'ac' to find the latst managed venv. What should be +# done is to have some meta-data written out to venv/* that this command 'ac' +# can easily find, might be helpful to have some stats like dates created and +# so on. +# VENV=`scripts/./venv.sh --latest_venv` + +VENV_HOME_MANAGED=${PWD%/venv}/venv + +# Lest normalize the version from 3.10.2 to 3010002000 +# Do we we need that 4th octet? +normalize_version() { + echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; +} + +latest_venv(){ + dir_version_latest="0" + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` + + if [ ! -z "$test_for_managed_venv" ]; then + for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | cut -d"-" -f2`; do + if [ $(normalize_version $dir_version) -ge $(normalize_version $dir_version_latest) ]; then + dir_version_latest=$dir_version + fi + done + echo "${VENV_HOME_MANAGED}"/"venv-"$dir_version_latest + fi +} + +VENV=`latest_venv` + +file="" +verbose=0 +DIV="-----------------------------------------------------------------------" +CURRENT_DIR=`pwd` +cd $CURRENT_DIR +# VENV_BIN should equate to same as $VIRTUAL_ENV after the venv activate +if [ ! 
-z "$VENV" ]; then + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` +fi + +CURR_DIR=`pwd` +RED=$'\e[1;31m' +GRN=$'\e[1;32m' +YEL=$'\e[1;33m' +BLU=$'\e[1;34m' +MAG=$'\e[1;35m' +CYN=$'\e[1;36m' +ENDC=$'\e[0m' +# 0 Docker is up, 1 docker is not up +DOCKER_INFO=`docker info> /dev/null 2>&1;echo $?` + +# ============================================================================== +# Arg parsing helpers +# ============================================================================== +terminate() { + printf '%s\n' "$1" >&2 + exit 1 +} + +message(){ + echo $DIV; + echo "$1"; + echo $DIV; +} + +ensure_managed_venv_exists(){ + if [ -z "$VENV" ]; then + echo "Option $1 requires that a managed virtual environment be configured. "\ + "Run $0 -venv-setup to create managed viritual environments. "\ + "For additional optons, use $0 --help." + exit 1 + fi +} + +# ------------------------------------------------------------------------------ +# Generate simple formated but incomplete help +# ------------------------------------------------------------------------------ +# usage_simple(){ +# script="$0" +# base_name_script=`basename "$script"` +# grep '^##' "$script" | sed -e 's/^##//' -e "s/_PROG_/$base_name_script/" 1>&2 +# } + +# ------------------------------------------------------------------------------ +# This method auto generates help based on the comments found in this script. +# ----------------+------------------------------------------------------------- +# Comment style | Description +# ----------------+------------------------------------------------------------- +# '#->command:' | `#->` followed by a keyword is the help command displayed +# ----------------+------------------------------------------------------------- +# '## ' | The first found pattern after a help command will be the +# | help command description. 
Subsequent such patterns will be +# | right justified and considered options or descriptions +# ----------------+------------------------------------------------------------- +# '# ' | This pattern is ignored and considered script comments +# ----------------+------------------------------------------------------------- +# ------------------------------------------------------------------------------ +#->help: +## Print help message (-h, -? produce short version, otherwise verbose) +## Usage: ac [-h, -?, --help] +## Example: +## $ ac --help +help(){ + if [ "$1" = "verbose" ]; then + awk '{\ + if (($0 ~ /^#->[a-zA-Z\-\_0-9.]+:/)) { \ + helpCommand = substr($0, 4, index($0, ":")); \ + helpMessage ="";\ + } else if ($0 ~ /^##/) { \ + if (helpMessage) { \ + helpMessage =" "substr($0, 3); \ + } else { \ + helpMessage = substr($0, 3); \ + } \ + if (helpCommand && helpMessage) {\ + printf "\033[36m%-16s\033[0m %s\n", helpCommand, helpMessage; \ + helpCommand =""; \ + commandContext=" Supports format: <option> <value> and <option>=<value>";\ + print commandContext;\ + } else {\ + print helpMessage + } + } + }' $0 + else + awk '{\ + if (($0 ~ /^#->[a-zA-Z\-\_0-9.]+:/)) { \ + helpCommand = substr($0, 4, index($0, ":")); \ + helpMessage ="";\ + } else if ($0 ~ /^##[[:space:]][[:space:]]*\$[[:space:]]ac/) { \ + helpMessage = substr($0, 3); \ + if (helpCommand && helpMessage) {\ + printf "\033[36m%-16s\033[0m %s\n", helpCommand, helpMessage; \ + helpCommand =""; \ + } else {\ + helpMessage=" "substr($0, 3); \ + print helpMessage + } + } + }' $0 + fi +} + +# The case stmt sees it this way: +# --foo abc ---> $1 = foo, $2 = abc +# --foo=abc ---> $1 = --foo=abc +option_processor(){ + + opt=$1 + arg=$2 + if [ "$arg" ]; then + echo $arg + elif [ "$opt" ]; then + # Split up to "=" and set the remainder + value=${opt#*=} + # If the value is not the same as the option ($1),then assign it . 
+ if [ "$opt" != "$value" ]; then + echo $value + else + # Don't echo, will return from the function, send to error msg to stderr + ERROR_MSG="${RED}ERROR${ENDC}: option $option requires a non-empty argument." + printf '%s\n' "$ERROR_MSG" >&2 + echo "exit 1" + fi + fi +} + +option_sanitize(){ + option_value=$1 + $option_value 2> /dev/null +} + +# ============================================================================== +# Commands +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Run a bandit security scan on the plugin directory +# ------------------------------------------------------------------------------ +#->ac-bandit: +## Run a bandit security scan on the plugins directory, set the severity level. +## Usage: ac [-s <level>, --bandit <level>] +## Usage: ac [-s <level>, --bandit <level>] +## <level> - choose from 'l', 'll', 'lll' +## - l all low, medium, high severity +## - ll all medium, high severity +## - lll all high severity +## Example: +## $ ac --ac-bandit --level ll +## $ ac --ac-bandit +ac_bandit(){ + option_level=$1 + if [ ! "$option_level" ]; then + option_level="ll" + fi + message "Running Bandit scan with level '$option_level'" + . $VENV_BIN/activate && python3 -m bandit -r plugins/* -"${option_level}" +} + +# ------------------------------------------------------------------------------ +# Build and install collection of the local GH branch. +# To not dirty the host, consider installing in the venv: +# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections +# ------------------------------------------------------------------------------ +#->ac-build: +## Build and install collection of the local GH branch. 
+## Usage: ac [-b, --ac-build] +## Example: +## $ ac --ac-build +ac_build(){ + gh_branch=`git branch |grep "*" | cut -d" " -f2` + message "Build and install collection of the local GH branch: '$gh_branch'." + . $VENV_BIN/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ + $VENV_BIN/ansible-galaxy collection build && \ + $VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-* +} + +# ------------------------------------------------------------------------------ +# Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) +# ------------------------------------------------------------------------------ +#->ac-install: +## Install collection 'ibm_zos_core' from a repository such as Galaxy. If no +## version is specified, latest GA level in repository will be installed. +## Usage: ac [--ac-install] [--version <version>] +## Options: +## version - The collection version +## Example: +## $ ac --ac-install --version 1.5.0-beta.1 +## $ ac --ac-install +ac_install(){ + option_version=$1 + + if [ "$option_version" ];then + message "Installing 'ibm.ibm_zos_core' collection version=${option_version}." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core:${option_version} + else + message "Installing 'ibm.ibm_zos_core' lastet GA version." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core + fi +} + +# ------------------------------------------------------------------------------ +# Run the sanity test using docker given python version else default to venv +# ------------------------------------------------------------------------------ +#->ac-sanity: +## Run ansible-test in docker if the docker engine is running, else run them in +## a managed virtual environment using the installed python version. +## Usage: ac [--ac-lint] [--version <version>] +## Options: +## <version> - Only applies to when docker is running. 
+## - No version selection will run all available python versions in docker. +## - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', .... +## Example: +## $ ac --ac-sanity +## $ ac --ac-sanity --version 3.10 +ac_sanity(){ + option_version=$1 + if [ "${DOCKER_INFO}" == "0" ]; then + if [ "${option_version}" ]; then + message "Running ansible-test with docker container and python version ${option_version}." + . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ${VENV_BIN}/ansible-test sanity --python ${option_version} --requirements --docker default && \ + cd ${CURR_DIR}; + else + message "Running ansible-test with docker container and all python versions." + . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ${VENV_BIN}/ansible-test sanity --requirements --docker default && \ + cd ${CURR_DIR}; + fi + else + if [ "${option_version}" ]; then + message "Docker engine is not running, version ${option_version} will be ignored." + fi + + . $VENV_BIN/activate && VENV_PY_VER=`python3 --version | cut -d" " -f2 | cut -d"." -f1,2` + message "Running ansible-test with managed python virtual environment: ${VENV}." + . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ${VENV_BIN}/ansible-test sanity --python ${VENV_PY_VER} --requirements && \ + cd ${CURR_DIR}; + fi +} + +# ------------------------------------------------------------------------------ +# Run functional tests: +# ------------------------------------------------------------------------------ +#->ac-test: +## Run the functional tests inside the managed python virtual environment. +## Usage: ac [--ac-test] [--host <host>] [--python <python>] [--zoau <zoau>] [--file <file>] [--debug <boolean>] +## Options: +## host - z/OS managed node to run test cases, no selection defaults to +## a host registerd to your user id (`whoami`). 
+## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11, +## no selection defauls to 3.8. +## zoau - ZOAU to use in testing, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1, +## no selection defaults to 1.1.1 . +## file - the absoulte path to a test suite to run, no selection +## defaults to all tests running. +## debug - enable debug for pytest (-s), choices are true and false +## Example: +## $ ac --ac-test --host ec33012a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --debug true +## $ ac --ac-test --file tests/functional/modules/test_zos_operator_func.py --debug true +## $ ac --ac-test +ac_test(){ + host=$1 + python=$2 + zoau=$3 + file=$4 + debug=$5 + skip=$CURR_DIR/tests/functional/modules/test_module_security.py + + # Create the config always overwriting existing + ${VENV}/./venv.sh --config ${host} ${python} ${zoau} ${VENV} + + # Check configuration was created in venv/config.yml, else error and exit + if test ! -e ${VENV}/config.yml; then + echo "No configuration was able to be created in ${VENV}/config.yml " + exit 1 + fi + + #cd ${VENV_BIN} + + if [ "$file" ]; then + if [ "$debug" ]; then + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s + else + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml + fi + else + for file in `ls tests/functional/modules/*.py`; do + # For some reason '--ignor'e not being honored so injecting a work around + if [ "$file" != "$skip" ]; then + if [ "$debug" ]; then + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s + else + . 
${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml + fi + fi + done + fi + + #cd ${CURR_DIR} +} + +# ------------------------------------------------------------------------------ +# Print the configuration used to connect to the managed node for functional tests +# ------------------------------------------------------------------------------ +#->ac-test-config: +## Disply the contents of configuration file used to run functional tests. +## Usage: ac [--ac-test-config] +## Example: +## $ ac --ac-test-config +ac_test_config(){ + if [ -f "${VENV}/config.yml" ]; then + message "Print test configuration used for functional testing." + cat ${VENV}/config.yml; + else + message "No configuration was found, run '--ac-test' to generate a configuration." + fi +} + +# ------------------------------------------------------------------------------ +# Check the version of the ibm_zos_core collection installed +# ------------------------------------------------------------------------------ +#->ac-version: +## Obtain the version of the collection installed on the controller. +## Usage: ac [--ac-version] +## Example: +## $ ac --ac-version +ac_version(){ + message "Ansible collection version installed on this controller." 
+ cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ + | grep version|cut -d ':' -f 2 | sed 's/,*$//g' | tr -d '"'; +} + +# ------------------------------------------------------------------------------ +# Encrypt a file, it the users responsiblity to remove the uncrypted file +# afterwards, also ensure it does not end up in a public rep such that it is +# in .gitignore +# Consider adding salt +# # encrypt file.txt to file.enc using 256-bit AES in CBC mode +# openssl enc -aes-256-cbc -salt -in file.txt -out file.enc +# the same, only the output is base64 encoded for, e.g., e-mail +# openssl enc -aes-256-cbc -a -salt -in file.txt -out file.enc +# decrypt binary file.enc +# openssl enc -d -aes-256-cbc -in file.enc -out file.txt +# decrypt base64-encoded version +# openssl enc -d -aes-256-cbc -a -in file.enc -out file.txt +# ------------------------------------------------------------------------------ +#->file-encrypt: +## Encrypt a file as a new file +## Usage: ac [--file-encrypt --file <file> --out-file <file> --password <password>] +## Options: +## file - the file to encrypt. +## out-file - the encrypted output. +## password - the key (password) used to encrypt the file. +## Example: +## $ ac --file-encrypt --file some.txt --out-file some.txt.axx --password 12345678 +file_encrypt(){ + option_file=$1 + option_out_file=$2 + option_pass=$3 + + if [ ! "$option_file" ] || [ ! "$option_out_file" ] || [ ! "$option_pass" ]; then + message "Unable to encrpyt file, missing option values." + exit 1 + fi + message "Encrypting file $option_file as file $option_out_file." 
+ touch $option_out_file + chmod 700 $option_out_file + echo "${option_pass}" | openssl enc -aes-256-cbc -a -salt -in $option_file -out $option_out_file -pass stdin + #echo "${option_pass}" | openssl bf -a -in $option_file -out $option_out_file -pass stdin +} + +# ------------------------------------------------------------------------------ +# decrypt a file, it the users responsiblity to remove the uncrypted file +# afterwards, also ensure it does not end up in a public rep such that it is +# in .gitignore +# ------------------------------------------------------------------------------ +#->file-decrypt: +## Decrypt a file as a new file and apply file permissions 700, RWX only to the owner. +## Usage: ac [--file-decrypt --file <file> --out-file <out-file> --password <password>] +## Options: +## file - the file to decrypt. +## out-file - the decrypted output +## password - the key (password) used when encrypting the file. +## Example: +## $ ac --file-decrypt --file some.txt.axx --out-file some.txt --password 12345678 +file_decrypt(){ + option_file=$1 + option_out_file=$2 + option_pass=$3 + if [ ! "$option_file" ] || [ ! "$option_out_file" ] || [ ! "$option_pass" ]; then + message "Unable to decrpyt file, missing option values." + exit 1 + fi + message "Decrypting the file ${option_file} as ${option_out_file}." + #echo "${option_pass}" | openssl bf -d -a -in $option_file -out $option_out_file -pass stdin + echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in $option_file -out $option_out_file -pass stdin + chmod 700 $option_out_file +} + + +# Cleanup and remove geneated doc for the collection if its not going to be +# checked in +# Example: +# $ make cleanDoc +clean(){ + echo Todo + # @. $(VENV_BIN)/activate && make -C docs clean +} + +clean_doc(){ + echo Todo + # cleanDoc + # @. $(VENV_BIN)/activate && make -C docs clean +} + +#->host-auth: +## Copy your ssh key to a `host` or the default which is your username. 
+## Usage: ac [--host-auth] [--host <host>] +## Options: +## host - z/OS managed node, no selection defaults to +## a host registerd to your user id (`whoami`). +## Example: +## $ ac --host-auth --host ec33012a +host_auth(){ + option_host=$1 + if [ ! "$option_host" ]; then + host=`whoami` + fi + message "Copy SSH keys to the managed node $option_host" + $VENV/./venv.sh --cert $option_host +} + +# ------------------------------------------------------------------------------ +# Copy mount & profile scripts to users home directory on the target and execute +# mount script. Should automatically authenticate your ssh key. +# ------------------------------------------------------------------------------ +#->host-mount: +## Copy mount and profile scripts in users home directory and excute the mount. +## Usage: ac [--host-mount --host <host>] +## Options: +## host - z/OS managed node id, no selection defaults to +## a host registerd to your user id (`whoami`). +## Example: +## $ ac --host-mount --host ec33012a +host_mount(){ + option_host=$1 + if [ ! "$option_host" ]; then + option_host=`whoami` + fi + host_auth $option_host + message "Copying mount.env, mount.sh, profile.sh scripts to host $option_host and then mounting shared drive." + #$VENV/./hosts.sh --cert $1 + $VENV/./venv.sh --host-setup-files $option_host $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"profile.sh" + #$VENV/./hosts.sh --mount $1 $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"shell-helper.sh" $VENV/"profile.sh" +} + +# ------------------------------------------------------------------------------ +# Print the mount table contents +# ------------------------------------------------------------------------------ +#->host-mounts: +## Print the ZOAU and Python mount tables used by this utility. 
+## Usage: ac [--host-mounts] +## Example: +## $ ac --host-mounts +host_mounts(){ + message "Print mount tables used by the $0 utility" + $VENV/mounts.sh --print-mount-tables +} + +# ------------------------------------------------------------------------------ +# Print the managed z/OS node IDs +# ------------------------------------------------------------------------------ +#->host-nodes: +## Display the z/OS managed node IDs. +## Usage: ac [--host-nodes] +## Example: +## $ ac --host-nodes +host_nodes(){ + message "Print local managed node IDs." + $VENV/venv.sh --targets +} + +# ------------------------------------------------------------------------------ +# This is the only script that has to be relativly executed from (scripts/) +# because at this point, no managed venv exists. +# TODO: Support -force to replace/update and possible BYO reqs file +# ------------------------------------------------------------------------------ +#->venv-setup: +## Create managed virtual environments using the latest (discovred) python3. +## If the password option is not provided, the info.env.axx file will not be +## associated to the managed venv's, thus you will see messages asking you to +## export some variables such as USER, HOST_SUFFIX, etc. Choosing not to use +## the 'password' option should only an option when the utility can not decrypt. +## Usage: ac [--venv-setup] [--password 123456] +## Example: +## $ ac --venv-setup --passsword 123456 +## $ ac --venv-setup +venv_setup(){ + option_pass=$1 + message "Create managed virtual environments based on hosts latest python3." 
+ scripts/./venv.sh --vsetup --password $option_pass +} + +# ------------------------------------------------------------------------------ +# Allows you to activate the lastet ansible managed virtual enviroments +# TODO: Allow user to specify which venv they can start +# ------------------------------------------------------------------------------ +#->venv-start: +## Activate the lastest ansible managed virtual environment. +## Usage: ac [--venv-start] +## Example: +## $ ac --venv-start +venv_start(){ + message "Starting managed python virtual environment: $VENV_BASENAME" + #. $VENV_BIN/activate; exec /bin/sh -i + /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i" +} + +# ------------------------------------------------------------------------------ +# Allows you to deactivate the lastet ansible managed virtual enviroments +# TODO: Allow user to specify which venv they can stop +# ------------------------------------------------------------------------------ +#->venv-stop: +## Deactivate the lastest ansible managed virtual environment. +## Usage: ac [--venv-stop] +## Example: +## $ ac --venv-stop +venv_stop(){ + message "Stopping managed ansible virtual environment located at: $VENV_BASENAME" + message "ac --venv-stop does not actually currently work, use CNTL-D" + . deactivate $VENV_BASENAME; +} + +# ============================================================================== +# Main arg parsing +# ============================================================================== +while true; do + option=$1 + if [ "$option" ]; then + # Check that we see a '-' or '--' in all options, else error and exit. + test_long_input=`echo "$option" | grep "^--?*"` + test_short_input=`echo "$option" | grep "^-?*"` + if [ ! "$test_short_input" ] && [ ! "$test_long_input" ]; then + echo "Please use valid syntax for option $option, it appears to be missing '-' or '--'." + exit 1 + fi + fi + + case $1 in + -h|-\?|--help) + if [ "$1" = "-h" ] || [ "$1" = "-?" 
]; then + help + else + help "verbose" + fi + exit + ;; + --ac-bandit) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-bandit" + ;; + --ac-build) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-build" + ;; + --ac-install) + ensure_managed_venv_exists $1 # Command + option_submitted="--ac-install" + ;; + --ac-sanity |--ac-sanity=?*) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-sanity" + ;; + --ac-test|--ac-test=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--ac-test" + ;; + --ac-test-config|--ac-test-config=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--ac-test-config" + ;; + --ac-version) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-version" + ;; + --file-encrypt) # Command + ensure_managed_venv_exists $1 + option_submitted="--file-encrypt" + ;; + --file-decrypt) # Command + ensure_managed_venv_exists $1 + option_submitted="--file-decrypt" + ;; + --host-auth|--host-auth=?*) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-auth" + ;; + --host-config) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-config" + ;; + --host-mount|--host-mount=?*) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-mount" + ;; + --host-mounts) + ensure_managed_venv_exists $1 # Command + option_submitted="--host-mounts" + ;; + --host-nodes) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-nodes" + ;; + --venv-setup) # Command + option_submitted="--venv-setup" + ;; + --venv-start) # Command + ensure_managed_venv_exists $1 + option_submitted="--venv-start" + ;; + --venv-stop) # Command + ensure_managed_venv_exists $1 + option_submitted="--venv-stop" + ;; + --debug|--debug=?*) # option + debug=`option_processor $1 $2` + option_sanitize $debug + shift + ;; + --file|--file=?*) # option + file=`option_processor $1 $2` + option_sanitize $file + shift + ;; + --host|--host=?*) # option + 
host=`option_processor $1 $2` + option_sanitize $host + shift + ;; + --level|--level=?*) # option + level=`option_processor $1 $2` + option_sanitize $level + shift + ;; + --out-file|--out-file=?*) # option + out_file=`option_processor $1 $2` + option_sanitize $out_file + shift + ;; + --password|--password=?*) # option + password=`option_processor $1 $2` + option_sanitize $password + shift + ;; + --python|--python=?*) # option + python=`option_processor $1 $2` + option_sanitize $python + shift + ;; + # --tests|--tests=?*) # option + # tests=`option_processor $1 $2` + # option_sanitize $tests + # shift + # ;; + --version|--version=?*) # option + version=`option_processor $1 $2` + option_sanitize $version + shift + ;; + --zoau|--zoau=?*) # option + zoau=`option_processor $1 $2` + option_sanitize $zoau + shift + ;; + --) # End Arg parsing + #shift + break + ;; + -?*) # Warn for invalid but continue parsing for valid + printf "${YEL}WARN${ENDC}: Unknown option (ignored) for $0: %s\n" "$1" >&2 + shift + ;; + *) + if [ ! "$option_submitted" ]; then + # Error when no '-' short or long '--' found and exit + echo "${RED}ERROR${ENDC}: No long or short option has been submitted, use './ac --help to see options." 
+ exit 1 + fi + + # Nothing left to process, drop down into action processing + break + esac + shift +done + +# ============================================================================== +# Action processing +# ============================================================================== + +if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then + ac_bandit $level +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then + ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then + ac_install $version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then + ac_sanity $version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then + ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${debug:=""} +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test-config" ] ; then + ac_test_config +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-version" ] ; then + ac_version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-encrypt" ] ; then + file_encrypt $file $out_file $password +elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-decrypt" ] ; then + file_decrypt $file $out_file $password +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-auth" ] ; then + host_auth $host +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mount" ] ; then + host_mount $host +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mounts" ] ; then + host_mounts +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then + host_nodes +elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then + venv_setup $password +elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then + venv_start +elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-stop" ] ; then + venv_stop +fi diff --git 
a/changelogs/fragments/766-ac-command-replace-makefile.yml b/changelogs/fragments/766-ac-command-replace-makefile.yml new file mode 100644 index 000000000..ca0d17e0f --- /dev/null +++ b/changelogs/fragments/766-ac-command-replace-makefile.yml @@ -0,0 +1,4 @@ +trivial: +- ac - fixed makefile limitations and monolithic design. Command 'ac' performs + similar function only with greater automation and detection and modularity. + (https://github.com/ansible-collections/ibm_zos_core/pull/766) \ No newline at end of file diff --git a/galaxy.yml b/galaxy.yml index cca9297d3..e4b998278 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -64,27 +64,30 @@ issues: https://github.com/ansible-collections/ibm_zos_core/issues # Ignore files and directories matching the following patterns build_ignore: + - '*.tar.gz' + - __pycache__ + - .cache + - .DS_Store + - .git + - .github + - .gitignore + - .python-version + - .pytest_cache + - .vscode - Jenkinsfile + - ac - ansible.cfg - - .gitignore - - .github - - '*.tar.gz' - - docs - - collections - changelogs + - collections - docs - - tests/__pycache__ + - scripts + - test_config.yml + - tests/*.ini + - tests/*.py - tests/.pytest_cache + - tests/__pycache__ - tests/functional - tests/helpers - - tests/unit - - tests/*.py - - tests/*.ini - tests/requirements.txt - - test_config.yml - - changelogs + - tests/unit - venv* - - make.env.encrypt - - Makefile - - make.env - - scripts \ No newline at end of file diff --git a/make.env.encrypt b/make.env.encrypt deleted file mode 100644 index d07e7032e..000000000 --- a/make.env.encrypt +++ /dev/null @@ -1,287 +0,0 @@ -U2FsdGVkX1+EjJmnWmQEpgyUNArAe8yFuGLfSiIhYAY/a9bxa+ItpMqm5UnIsAIE -np5moXmd3pj9MXZTVJLJSSBQ/QuePjdEfL+HNZWX0Waw8GXdmIWkBkhdBLeRdXqy -0XHRXTptcXhFXx9fOenjgSxm8oblTy3P85EsbVcUrCP9DWEyvAnrFpLSFFcz2OB7 -6/wn5EgMHGhyjaTjcyfkNU+Ae7rNTCAj6Hcl049UzAFb3hGqaz8F/g0wp/mX/ThI -2qDZCsLlREPZsgfqQkW1zXPQxS08eRmUR9FsDnDH1X0dbp/9eSHu5IT3MrwawEz5 
-hS/K3XkuHFpWQWWf3JBPXYl0A/m2WCM39bmg13mfnDBJsxmnEY8510zdNTHTqSUG -n9L/ffp2CpCjEDbKQJuzrfuzQ/h5I37XqKvE6OWdY/T48mDsqajGepPBmZF1WrlQ -y7ZMWIlhzxw8P2ZX1nWwchbTxy9kjKs71tk+ikKUoEU56SY+WmVjKvXZtUB3sY/Q -2Nho49hpGJuPV0tE6x8oAMW3ERZI0BYWMrLOf4bm76LGgs85WrEzz7dnaPkCaEbH -SGPx/U93f7zV0X05sdOPPRoZ4mxLxNOtgA8qAQLeAlFnzbLRlq0q4M2MQ4YZ/9HY -HTE+CW8sj35e6TRxPqfF38yisVv9JmQEhZ83qfMqrTaR4PqLWhCm/Hak3LMUNRTw -UrdGDmvhCk/BHqj/kmRyCY5Ts9Z3gktUD9OSuxBVnPxRo+exabbjGqMK0Eslwzgc -nvgba6ReKkh1xf/3PgI3A1ZOKJzkE3YctTcHV0+o26I7JW4kesTdXYIM30RgVF7X -N/akJFV42ZT8xEmYA0k/v3tM/xpQVzj8FbzAjP/d8L6etIolTJPWS/+rkgTaiZdD -EWN7kLfgzmC/OClSjuGRyLNbRed7rGuDLmKZKY+6Bd7tcLgBWHKJQKnZlJcoSx0C -MbTS29iZHmEkV1WWeTm/mrjqZsQ8/5ky4Ug73RePk+rlFovxCLSypFOxJRHR+nte -nYE7iJmTVwE008IoKTWbwxZaqbROSqKL9sw0r5Kps1Eve7oGSoNdzzCTArlteN5e -yU7pz1lZrOWGlT4pVnYnxuY7ayGkLvxqNWBG+bq6VYsh4W9oMHyW4hmRRTcvfB9B -/1B57RUM6rCxAWZYd+/T2TU0Me19NKxhwHct0zrllNwypWUCZQ4kfvQvVu5Xo2AJ -LawsQ1KUcFfVLUfq9ecQU8OSU4MYYIibPrjUqmQF7Sk22HM9cXghAWY0nPfiHTVd -jdWn1c9z8lVNk/jZVxN6H/MS8mC4BRw0tCJDWKWEkhxX3V8AvRdTOIsKYp+i2U5j -CIcVGMfAtmqg1WpJfVYLZrYI2uskjL6uNmy7XmFxdztbiZyuE37NEd7cw0q8n8d/ -mB+BBQ4x6Jd5qvlWNceaRv5QxNZOs36CxM/ta4bGBaAa47CuJ1EMo4EyoirT2Hh7 -tulT758q0HXYsKcg6EImwAYJvEws7MDGFdpWyWz2c9gQX6biNBRXfaGzoXE+hQ8A -e+qPaxO8s5yCGqiKu47KreUotbP0YePyEE+RlRIJGt8t9JGtMVz6k8MzcEVRhtWK -iX1mwjUR2saocyXATYggy7Xmooy8nLjvKqkYx5FKmL/csuwC1gdlDrLRl0MPIhtP -z5o3SSLDud0TzQdM0vnbaV33ynMbon1MRW1CL5OKMHFMh4Zoilm1ti26JiQZQGNS -1899V1IAjsVUgLtCctP4WiIlFw3BCwYjXHnD2r7gCJc+jSkjwrM+ayhZ9VIGFC9K -J7+Ynx9eon/QqvMT5Ek0pJnVsmjJtWLsmOEWmW57VFWKCgW2wW8s9+/YLdUnRCvd -B54x2MNHcGLWx09C/Or2Ir7U1Gcz0LXkkzbDifabUjaH3LjIfYafnyYcu1BIV9R9 -ucP9f4Kow/oWZHi5t0SFNO8ArqLOumTHxFpd0MtsV64xJf8TuZJkSEZbxrb7VoGM -p0zVVgZTp2Ryf8gN3kpTPQE8CCZInpUgipza59z5rtgpGR2X6y1Qswb7vMez9NrG -ihDA3iQX6v/osxNTVfBsTfEjE5tj1vWlm5JxteGZ44RiRmMk5GuvfO3atccFEmxf -NKtJy14jwmy0IepxSMrs9sa8/cLQGH6OBOPUqmE8OpnVtlu8BUt7y9BrUMtSU7g1 -ej3drUGzyuAMSXDPhRuJjL2KlR7/YPKvLpBvhGa+vn/VkFcaeZmx96Aq0dOA2dQD 
-v/y5xiW13f1s6/MNfm1pt4vGMoijlYHPPH6e4sFRGhqcgsHkJuJuF0HKCunXbrEU -eSgpZ+zzAEt0C/Bk9iaKYvqG9m8YDL+vG+hZETHucvmGATURoyB9DeyLAgLWSIGb -LCiL893yhbgVc4j7tns98X146xX3naHZcCUKzN7cFAn0Kzdxv4T0TMw10wTuQj7L -pzykhyrlSCatGK9ydrZWP2rg8xIiAxtvCPvhZf3Douk4KuF7VTAwOSM3wlgAnuZG -KqhmaFb2cl/UynMiREMYkAgnSo4oraLSIDT2tKkCm/Kv+Fx4HsUCAH205l5ew+/J -9ftje6gE2evhaycR7sbOWul2N3gWHPxNjq21SjZ61uTVfrdu1Ls0vPQ5VlEapFmL -UeHCCiRL6wDZx572zukaueonghq8PUO4GoPWzY3j8kX3PaObzNxyfC+Nt8n2yklJ -vyuWzEMTym5d2knQwt/FSsyz+KCHEfRwqd6Nj0q0KTyfwU2be92Zf8WPDhn6Gv+e -dNjGLdgjX/bvhsEJb1SarSPM6Va8x85qja5PUxcsWYyEhX2VOxWNnG/9HMwhPDDg -oHiBRUKTx2GQ0MShB+M/yG7yWu9JRcBSj8jk6+z5TfdFXfed1+3+h/4v5FNlvBQw -E5WRadVZRa9FjgpjZ/ABDZbTUTIM05akVyegoRxViX6Q2u93u3HpjA2BYM633uJs -l85czl4l6t19Qaes/wyStS5KXMYR4QbmHAmn95zz5C6UMYa/F1SXsbthooEuYMqp -WFBhceMu1d31POJImVbfAY7GQOjsy9Iv4ftbiG0SI+PvByP1/HOwlkyJi1nWRPTv -7yOclBXc3WRx/zvejsQule/Q2pgpeuvB/vw3LVR6GwZfsPzSWoyHurmIJd/+Mpe+ -NyCaUrTsB7IRwnwCX2eKwb9qzyLPZIW/iveJPL/2YrxK1IpHSvbq7fSMGmgWH4V+ -mYIEIiDavMCuZk+yS6WgEF7DtYT8ZAn8UCNmLvDtlMXMVXKHIF7rflSiv3zm1nul -o0giVpplQljtFdl0oOw7iqhpNzSNvTSzHGo/wxMT4ECMuWJ3/cYttFmaf0gY83W8 -tEKl7JG33jxf1MRQwLe99Tn2XTnZC6Mlc5uATQFZu6hwekhjzgO5HBkaK7yaGO2U -xS8m8QT/MWK2Wkqz9WFXjlerLAHR/p7GiWSuRdB5PaBYut1gZwAJvuE+X935qJ07 -nP6jAwuITvxBC3rUKBRcksthAVbVtfQgBw9OfgzUwdEuGf1a0GhrqjRVyC+bwLwa -SVAzu5nlMK0raxxoxTKJ41mWe0wFft8voz4XXCIg+Z9rnNLhxeeIyVCIFFbbDvjP -taYLmadGlxc7Q3aGphWwlfMqVmkyz+8OYzBpXxF3DH6cUFCuqQopiA+7YaJ0GJ7U -AuUYrFWTIAUXcoXpZaZfpirqpoCx7GTkKr8uxFer2KnJcgBtZaz4nIGypVyxYZw/ -x3yMHoCP3MgYJLeMklb0NQYpFhm+yl8T2emxXddDdDqFWKQJXrk2V2XTCsCLPNnQ -qs98SR/jnirtGctuNT/gHxBQjzQfBZNCMEfHLSbDjGt6Jd/KvwsfDC6Pc7a+RrRN -TBObD2KPs0i4mTn9dYjeesRtC9GLucGZ/uT8CsybvXRY2kZi4GV6uHJuQoCjZ5+b -/9+TMIXoWSw9TqA4kesARL/ieUYjUAvTGFsPnNmvG414exjPMFd1bOO2PZM5PMHb -3bj9csVHmF/+MvbUEzJSIl30f+G/4T/8pd/dvQBWbr5rIEys4mLGc6bQTKDp0kwo -9SXblV71Ff97UNpe0G9/m/RhL9PxwTnSOg9aFhUHT/6oWBNeg0TqZdzHcC+nZQ1P -Jjuw28ym8PFigJ0jIwEDoErN3ycIMbvksw7knPbNnGLzsKRaJlawVhijxE5gEpek 
-fUyUdqo8VCvKkBv3zZmr/QcUO9wF4Ru5ZpHIL3e7K61bgB436hmWMUkLfAiNAxrg -VW0i5HCo6oIrVTyEp7SoWfM9pKyE45EIcwHWlxfyQumJwP8hXPB335yqfyEWAZ6p -9VScSwvvLbe4ruRmjhTsB0Y3i7P6RmDf+0iVnt1g+BG/PAEeR3c9/Zq0G+CXf2he -EFBHPAA9Q2MLp6cF+FNorJ7uLFqCFK/S+IyvPbcf6RFJyIzG+IJf1V4tvmzbh3Bq -O2qhmfaCSDrtPefgIC/nJxgBzDPfJKBKLsuIBQFeapXS3LeZmgZ5btGPy6xasdic -KScJQdPQpF1/bMYmFkf2gdjBxVcIuJE0YgIH0w1snhR9yWRaSGxUafQisOvdMSFR -rWC+9IYHkxNDfhdX+HO9ZMpb6IrwBnw4vv/bHanpp5KIo96pcX85gxwQ7tEP7GZE -4gm3/8fFigthmOM3yLyUdaqKCgjnDqdVVqVldqnNRp81KsXhE+pMdZQCrTU0F6gP -89bYgTA1yQfoM7TEGUz78zA2JmMo0dOIu2dwu3tz24/l5tDxkUVOF0Tx99RaMCix -kvQKYfMFaaBqeMxJViJkfjPxHelqDqk3WAqXhC5t9m56aBqWbasd5rO+7BdsoraE -00m7R3WpMSFSBEEeYTYMmEqyEORVVEqRa376AMWLaFtKtWLu42XFQvJjYk6vbCoX -ZTvev9C/RKwQkrK+JidNJA20haG403OOACxDiDHn49E09KgrMIl7B62hdT13OCun -1mMe7QzPpN2VdzglWGQsz2cyMMBywCiD+A+bPku2B3w0pGGHjKS4IhKiOLkNeR2u -P1qQ2d5U3RRk5BmLevQVGgeV2bHTZP7lDbO7GqWL0uvgWijayoqW/RYxiniXpCNJ -ZK6QwWKyFyr+yaipB8wRm9gmpJdawo4QHl/0T+05jgTHdg2O7t22OVWNNYF4fzms -TqPYYV7KEp3sGk0jrhtzGwz0FE57NLZwFyz0E94afX0ZmnjBLWoFzX5R+YddoOVN -pikf5RHi7iAO8fVurVkoiH0G6osukNqS7myeylU2VjqmmB8u/b09Up3faAHPjLEH -yZewcVoAc+b9x9xQXU7Vk4RO54+HsyooRZAtp7ZXIyPrNpaGpdJV8p4oBtCuM4SR -2BPncWvmmwqae/NFMQnaLsZcDzmgUoyObzwf/fiiXdCYMvJFyFO0wt8yspUCgyPi -6+mkVOhH2mSZc8PFZKMb8nPEfpLsXN/1k9ePdXx3i43tTVeXnyAfdNU4hIjKFfgD -w4aXAvIPe3oHr+gYgwLFgJqTAlr8Egb4Ae+OmJgSI5DCepA0pPI1fmoN+aWT4lWT -q7xUGgQBzw6xU4rIptQfrpNXU1TpkUqsNYlLL1Uu4o4E8d8rdToziVAGZGgusPcT -LgbgMY6TcZMv01BLzq9L/u5acKQKf+AOMQveVupBsSlpVQda/ufj59Up7tPXq9J4 -w8U7qmsHSjv+DczwgzbYn4oX9au2aC5VtglZdNIoe2DT3QMpolblU+V9rMLOnC+F -19EDLn7U+nq0ZpxeSzH0LXDcslN7e/pGwcPJymdk7KWCXf3R4mtf7+IKup2xkO3c -PmOZBYGmESiiupBGYeB5TjmPBRyW/ESaMO+/kwBqV0v5lKG33ATCSDcZ0gq8DOTI -hJza2b4tF2V1qbbAn7FbUHlgxPKvcITsH7yOA9JtdBTSKpixTR6OyV6UNq4xWuyZ -MgzQUHH2ydLrWSiwc5K6KKclsBznaRcMv8Tjj6Ph3soWGRO9dtiV4Qp52OP1HMPA -lX/yybLm4wCjXUetaRkH+bg0+Cfh6NLylXh3vgs/0BSXAVX/zJriYOhpVFsS4nAX -5GVoM/n/C1oEfw/zF7KA2lTDuQHHXJ46PpOxBNfdC2Bx6mcLpconSrHmwRpfdaGT 
-dd3aeyDZr7wFgsB/gFcqLuOG8ZFW6m0rmMwNQXa6nHImFgereYE0Qt2uSSTWgFH6 -T7NaCsTfUlUvrXxLoWjx7V4yotbnxCP9g0HClewDeXKE7EBYjI5C4QCPvkVfS+2K -GghKjCROpHGnIrl2sghAtI2twcgKA5+wj1Zz9H1O8t75sPYneixHOZCPwNspO6QJ -GSzVCKM/90Qf4io6l0l919/40Y7RxlDJP2c/HaIYXq9aXb/DTOA4EM7nTFMNuPjC -Gl636cfvYEyV1NPEpjWP80398wAsBWePWAJAwlkedw1+Da5ETnmVxG6oIFIS+O78 -2grviqXNS2nzsY2hPU/6Sbe/FGIP5z++6f5dWFNdS9hRUgw57nwKMNp9emhPMeR3 -mFQEqfGNJ6GG4mttCpwXqxKOiNV5PGEHG6X8fQbkBVTApNar0lvTUVza046B7Prr -1agCfgvdSU7ScInXy/ptBEUzmcurc/Qpq3APhyU+4ekCzbkHOz8p8CaB18xHatV2 -Z7mmYp7m4DAHQjCB4Ax85wI5IzB3bzfgw/uDkSgkyTcfAEdT5WLplfBlqf+SDg68 -eWE1Qe/Oa34gwHAdcCsKpg214I7vmTaUbjmWtgqhfTL/98b/trF9bVoZONjld+rT -FRIAIU0JBGBRWbadlAmTnt/vZLGGlopKehw4MzVIrv7HKDxF9dy27nXtGwWFHnd8 -DgFEN07e5U1yjd5OJ3hs+RLjozjO+3cZGxn+rodXktcOXjduA1nAaSZrFPlwjURM -EwnEHWCNlEw1NxlGmpwmuJetcbyqkCPia6Adx5yhlvFE6J+a6n1jRdrbABDfMJ9K -8sCCLRP5sdmrPfc/EoHDocjgyN+khJzfcGQxe0gTzploiMnZi43nOZISh0LG69zC -ApBdQo5ycR+kHM6YMpR46EUl6MbDF5h50leSatflVOsuauLzCVtPHN/1CjaJ+uij -H0yT4ZPFCynxb0j/Zk6J4PuZmxdDVRNo4K4C3vqF7eYgniMTaGbsD/gSubbCC2qg -bIBDYI9nxB7of7Qt8u8CkfO6F7KrFrCDNT2elbRonKl5Pddw6KVaDLK8sfauGpgb -i/XQaL5fVT31e5uGNQYgi4zvNb/JxvY4Yl2K7qwVWecXvgnDfxdgGCDoy0m0h4iY -ZFKunIIPZq11ljy+PgwON955Nwka9aJSkQDKo/t7HKlZqQxYpYyOFd2gsnQEx1FS -rfj5ZgbOYjkPumuBzI4Thfwf+e/0I5stfUukveXsoVmSGt0ta8euOKPogvPZHlsu -67k2DWMsxce9N8E11AgoSQVboYI6YU+VUudyrRdVhUEkp0HvuqxbSB0uz/E70uWu -xZr2gZGhk6yfMAp+yQcGN6dJv2u1GBslV6yYpr0BfHEuJnePHJSwmL9NT3TBrcyS -D/Zh9NtDnuQsbWZXKB/1r3XRqqTfTetDyz4WvRcvBmkoQ4X5RMVT8ywlGzKFieqC -RTUVCWNdG20rVLeuJcaE5txkVfTgSudIEl5aL7ERK45yK13RqP9W4G4YyQHUUVPu -y/sKOCrpGGJHPJBcG8Q0urqeRPq0DkLP9qD626+By9cmMpXW9v+Cc+ZWV33J6dgA -hbBRxq6ruom4UEbNRASkcfVDjqGHUnIhbHI2zgJhNWFvHYt1cJoYPWHXza5lxt/f -NFw7GOwNZolB1QIxv92PAY/PAE7bU+vAjOc93BkI1V6zvNdl93ehCxB3froP0pTY -o4HxVT+9scFcopUH7yXEOvMGXErS+a83ma+0Lp3uEC5be9TdgcmHlHEDfRigiMoX -y1D2lRhMGC9n1kiXsIA6zMnZD7rqlzK+jJSth5yAqFyJOLzD7KKXmCTL2wBpspCM -kKHHvq+VuiLJC1fjkGdyb0gzA4YjLkGIchI/gFdFF6l8/vzGGIXwV5UiranVu2y/ 
-2gWUjohF17zeBc3O6PaLZLPaPj3g5nFvKGXSj3E7Qj5gtjXdYq2hU9vfShFFCXzg -vtQS+wJZ/mdZAYrHh0+BQ5v5hggXvyQzTUuoxWyAp1iZx0GmGuqHf3gy7o8jwD0K -KfHSnIepMaBR9e+XYQBmvLfFQLrt2KaoGyc6Js7PXMloPl34AiEu1Gr2wftZU6tE -BKeBJ0hY/dAG8s2zuW5SiyMPvpttLLBmfCY/SvlI9tPCbLHMs2NUof9Kkh6n2pwd -PVyM+HLZiLkfxux7ET/J6QrjU6dVts2RXtHDOu1ZJq0NkCu+lj6hgxGTyoliUnub -RRW/+HIbpFYkOLc1LJvJD/TCylTIe2y4ftuiAIVR5vNR6ZnC9Xfwy9pNkatkeeuD -P0J9gJliBjr7lUT9yEzpzFCntIuU7Z8GRDxuP9dnka8phYMHeUkUpWUfUPtVk3JV -NhuaTllC2zO4cfdgo1pMi47tgC8cOwHzkrf6jA5NeRX9hyoHPpDNaKS1QqYYdXQc -r4MiHwVxg3aUAa0j8EQa3V/XPT4frKAQCJvXs3lzh1TrNcAz3r/IVujvNLo2bXmx -B3BTDT6WTqYXFxBoHJKUV+AkMzE3L63o+SrHlOdcqDXf1aDs/YKhZGCuWNS1GE9z -Fo6kY2LUsnTLuN/dz/fG+FtMuvLtwJKKDGX2LG6vF9Fi+uWEFFnj7+RViXF3EqL+ -qOIs1A5XWQeDmFtAk+079sTii0/AdZPR9myF7rNJQOc344Rx+y1OMr7jV08tFugR -D/N3SldpFOQ9Yrc8Ks+cSgYJwUGvDLSg3awNq1wv89hWY7p5UwRtntlz9Evmbjql -CmZE3sbYKupamNaS6rQmyf+Q8kEy17l9Ld34cSd1n2slrUin1KyJZfMZ2F5f4If4 -iXWxlfrI1pJ94F+Ud2n/nQOOiJ/qN08dX/N7qHqV7qJWs3k8eoE3/rdllfaeyllg -OvFuF2XsylR/FIPjN9MA81FfGWV8tWfX/RtHWcTrPutwrV/G/OERKLBvSsTtUxDF -P2bZewouZqUnjXUiRQoPRLPFHLjnmjwHAaVVSHZaHcePFz7pkSlyl7ANfXcl++KT -yK8SnlXkeZji9e7M/unspiI5DWG9HNnEItgmwL4v/XeurN1uPpX/bqjSrkPPASFF -OvlIYqR+Z8kjPuy8oWKzlapquqaqgGrNjvThX57fbLYvgLCpAkRPk231cVXMJBi0 -rE80nIMB14mt3qM+t6FwsMILYrqb78ZI9f9FwGI5eP4mHmPwL8TUfnXty2ZslPW2 -lswgh8GG13272+U4jXVYQOyhcxPuAscTsspUxZH+Lo8EdqpweinxjcGh+DBrQgFq -W+Cfj253dR+y7u0v3lP859gnQKVZk/UxMebGZ3egRniUAZDp0vjeVoqY9RJxqAQ0 -NY3CU/Yi2AOa5bIaaCBbdJGKzTlQdphwMec7q8hlpJwE8f5j2CUmhHrZ/78FsSg9 -uq1zDf9oeouorQun2P4FTtSAa+Hf3MHCTtRoqCMgtlzdBsS4ZUQzSlnqkzKUVJ/9 -lHUeP9MliDwBba2dkCEU01g+BThz3cRn8qEVo6NTEWvxOCG4VB9bWlIDC5UyX1Fz -MhnJe8wLIQwU4DwUex6UWc4yvWvy+VBZvbokmojiYQtFxpcnbhTuXaRrEuSjv9PM -ucmG2i1E7ACQ7+a5u8Ot/ssip+Ox6p8V16LLSKskDt7mNagTG/QYiCZhC3qFyIGV -SdjatCvVIeKx1rYtdslFGxD/UzivV7xPfQHWPU8n6wHWfAVjpUtxsXBJ8/xok5VT -vVVEz/t5IU1E5PImAF/OVfr1kk3/OvG+0cA1KdVplpXE5UocNZwk9+xsX4oHxoto -W8N9ePxRFdCxp+OyVNQVWEmXziJb5orisDDzbekxGLpIQdTNK/IeMsLzA1xxoSJ8 
-VEKtk+Bwi03Mj1CBoR5L7zJQiUuZi7rcalYfG2OeOK6BlRDo1o69a5Z29Mry3Zcq -x5lsU9wMhZJ85m7yBmSGMgKO87kbsgzOB+vDWv56SLAWaIMxgtQksDQeIHrBBHmI -ndhvv46XSzfTIeNkSYXTAzjg49ibqz/ftuTWzXrpeoqehUlV0DGoLfXO04vdpkhW -dsZN4qkbKAISGYWETldWrc4SReKewWfs9CqYNoKhAxDI11efUhTLp5o4GD+LVO5F -Zo9B8wZ6AwxqZZkzY1q+GWKab4MM5K0NhgkvuO2XmNMkHf2Pd5BLG0tf73UAXXsj -iFFkYUapjW76sp8zO9+2um8pmV7HIDxNOfaLQVnJW3E+4Qtx4jVoHiAaR6Kr0JFh -5u1cX41mU888IKunnc8DkdSFu6DoiHwIM913nKfCtE+2tp/kzkN7GYoNBqEmhkP/ -RHqiN8KcbmlYZbo0gMjzSRRgnTdJhfTPctaZOylBGJKAObj7hs8c2LB5jxuHyRMb -wov/cqPMJ1Hqx+dGcCzWtdi+6M10596P6KHgfUIMWJKqxFtwEkG6TWtG3ivQpFX8 -w30mImFWKtZSx7XMs/HeR6OkzFIOjm6EAaZ7PGf6DjGrQNFKo1CuS/EIq1+x2v2E -gnUhPiHEdf4sHhOVuZxmniCvnfWOiWF0hj4XmmdqpWyFIi8kJEqU9o88oDnmSMCI -D9lqLQBrgX+NefvxHNxSul1ZMFLQ4weVCcYSyjLsjokoFwTNqlrWQi7rLPPmMS/Z -4O6PDlTEdm8DS7bVgGrTlNEKMWyAE4TEwPDaPaqBYZ/Rp3cCFJSWohpXLenNXJSK -XF5Xx5z6BkY1zGmSJMd1sUPGb6AwxEceywsxDj/5pd9fnwDvqV6d5+g2YaB6Iq0p -PcURws9MzWpewBYUtBkjn6+FQfubVM5Bkr/u/fw2UP1aJ5+x9GDWyMKY5pzxgS0M -FpnuplbrV/30B95qLVykwal/aC2v/aECUOmJsjwXZtAaqoAyFMR9eZ+W0a64k/Tw -v75b4G7jCa681kKeZp6qSQhc3l+PcIG37SFrfsrwLNF47CWEyNULbCa/8jabLTxm -Swxi2tcE0zAoyUv7tpAkN0GR1knA4W+k2Sg2iMppSQ2lV9lGFhTYbRohF95kGZWp -7oht+oD3Y2nwx4TWDA6JjRPHVKBQs6M4I8T//+fbfZgJiD1lx/ZRPrR7jtm7M80s -GDRQvtZ/aul9R0KHweNZHr9o55ywYELf/BNAuUYbH8qeacg0Lc0y69rep/Dk46Hi -EgVrc/jCSKLXm5Gv033BJL2NEZK5nsyctVYXBrk9rA3iO827jZDk5o1l/znOETYh -Y488IK1wfbSvfllyWLoHgtHsr4I+jVg2cgPWK6+ST87GU12almhMPQ2/7ifO52RE -h+ZyHO9+E9y297aMqeozrqUX/7Tr0u3mnJ7bWB3OVz/yezIjw30N0/8zkEM4aIHU -itVupvRqQUxKPBsld/QJ7j/X3oVDX2AWHRMMsKRpES8fWxttWa+DEBxSFCPluAQP -zt9BFDSboyztzdsIsB2Gtd6WRJH0ETOuqvY6/MMqPwWZy8FYOaiJFAZ/Vt+6T+jE -jFAz3dBQfMcYYW+CYt1943atXCm5V2xB6pSqzfCxmgjT/Sq0LLEooME7lY1Pwgqn -wj7VhDuAEw3KtgRheg/pqD6Sf+0pYNx90ormkU83dk6T9aqi9H/mSkbAP6y3OeHM -B99f/CorcIntsAQo+BT7CFx5Dt6kjcVBgtiEJj2J6JMiNFaL1xNQDw+29d7aH5cl -XuKkO3FjTuyAUk8yyllbExK41yxQ5/97vmKa1hO6jQFrsYmeLj18wX7sBkvN/IHs -FVYrverD4enFrQhRRkEstB53+a6UAUJAB27gUbAJ6o8HdCFVm6UuVAOYJOI+ZrsA 
-ai7akwRGuAz+e5NTYOY8LwOWwuXpBlMklsIgb7Qf/NzncoZALOONNjZ3WS+EwdvI -c99VIvuTp6CUg6BZ8JwU2Hff2jVbSyduvu+0ZC2FclymJsNaFX2pcudwFHJCd2Om -FUNxQ7oCefgu7jA+ji82hGhLWjEM+bur4GesVQdLXxvx1W7BHRJKbp94gaJpvcFk -PVSeN2fdbwavapm+8S+IbB5ciebPpiK890v5LBsDPxR4yjVcPwd6Ssw7lxf9jaly -8LnNAqRAlVbU5DZmItQDC2NoncBCxgkFIE52w7nq6oZk5THNunApuQrGp9TdB6DH -W7kWj4k2ZUQcoyr8L68grduQT9Pc3QHfefwhasfUfg1Rn+I3JR95qXh5zI+ZKmx4 -bvRrtJpbSUt4mtY1B9pl9smhg0Fj7nvBMAcZ12967FUKQQL2VBRcpdFSQNgFR5ml -F/imKA5ll9DeNDBf2nIMwWLjIQK/j5rlDlaG5cfvmdEjfJAIXf3WrIEsDR9Q4AX/ -9xJM7TKW27TqaRJeeZM8b/GAIWzdpFNbqliKR2JpHwqlb7dNkfV0qSVoXopQvpUL -H7bJMEDzn2ruokLzymy7fk/OldDE0Cq41o8VVXpExxTIfMxUlTVJtn50xS2yByg5 -NQfhwlpffq4xOljWwyLaHbq6VQYmN7OuxhjTboow+M0s3tQqA8sosiKjaAg/t6vZ -6HzlmSNoLgd4GOg6AABqblf2fjbD/4mpSHqsnbIIqtVktFtnzWiEU7AphIsztXSf -0HnB7l7xlJxEM6m5sQcP8FNDfErWMdoowBcJgF16QMS545ZqzPANokeWMSl9D57Q -ZNisoZsMZ3UG/rL6TCPMQnqHv3kH3Co1yl+DNrr8dGUC4Bhn6nVg6BFBuiNSeQUQ -uVvJ3PuYeC3NonjSMiRxwhkeST4XfEg9S/UZ+ApLNCLlM5ql5ZJObui2uKFzTAed -jATO9xv44JZF8S8Uva59mQvfC/+CjDNHcOOrrJy2d9OZPu8t4INZs83UTynUxh9F -2Ln3kYZ39yCTwSvB3+kV30R7/n0p1Cfdl7M1+fTW99jKXJ9m9pqjb9QhunOY7T12 -3yCdyTsdwX/PiTIUwM/pKZjw7xuQixOXR+OZpiHmvMnrzUSeMZsxj629E1mrRMhc -jDZb9ZnQOoLNggbaKTRuaE/DjpACpuMpi6jmnjVPAzMyGDOtQ7eaZx5lrpBGgv7z -pzd7SxXJn7ytfWu201TStZd0FyLjiAoYiSn4wNc1itbAhjirRLQxvHvaUbGmkAiY -kdOjeebjg4duEErxaDcRLcKRWk6MWeLH+xP3GjWSeQdalpfXRr1Vbs+ErAC5yWoO -D5W24qzZbzFeWngKEh/Aykchm42ZY5oxsbg41frjD1eKiAqCs+Is8x9CwHtu1uvN -jwxcA5VxkrxZlrVyorm2+mmnZ6LPwccUk8fSUnR0HtI+g/lEVrr72ho8qGVfUBau -4Muc7tg48iggb8Qs6wTuhG2ykBI8agGMV4aIjV1P1pBniXi3RamcMphUkSSplFPV -nBEx5RzaRyQZrtoBsPfGglpeOIwr61l6zgTtD6gWFwCvFar83mihzMzaSo41hDnb -K0EiIZxK3W9jnqV0QsCdKxb9PMrBM3ztAdtV6HJfsZzXxqD+zCA8Hz8rO8+NFgHa -42G6n4nEfXCUuzZOGgRMFSyCFZ3g5c8l+ayfCSk8PLF2Lx5K8cbu5qZrhepicJI0 -2+sEjhpf72oyNmQ3B1lUkRcRRgKWQGfRU5raris7oSiou5zmZUgScowNSyZHObGI -9cDxBrTWbm5kkS9UlXMBWs1Div//UJEmTnbwp3evZ6kAjunedxIjI420jIGD1VCu -+QQ9OZFak0glvOZ9L+9AbcnznmHrOd18qHazDPTwmpf7bAvDIWMwj3K/m+bXIvjY 
-Oifh6YR9AQiZuvkvFehZR0lFA/SciFejES7UOyfTqLPYDl0YC8tywGaGtNOrXc+4 -p9kYOeYxeKZO2DuOLr9rWgebSi8OCQ1MsigPeYintUOjwDHbWhLZ6tnnpA0/5+V4 -S9ozLh0Zp3P8PyyYUCmRUGEr7Ly5Ec4DM3iU1JBp++ecGCNy9HY89LCmBcicsMV2 -uqP/umcyi3rCYR49SlvGlUNFeKwFxFGoU/zH7MXSFMBUfulLy2EPS/kX9CdgA7XM -Fu3QGYiuyw+0SbBRAAxKACUPiRy80TUidmxSzSWetRlqdW3T/uOMDMuxZN/UbEsD -nqVJJlH/7VBI50cME1SghRw8Dn2Y0wz+pSIFOR2ybzRZuQnsQ3hMZ/tqzuyBsMde -rtHFl6quRela25mnvKDsOuk0X2f5buDLP5Z8ypshXxOyy/MM8aXbvclq/VvpHLu+ -pmE2cztI6bWlMfEABeev9DZ7zjZ4ADPpm22LULe4ZGZlnkR2IjQthPSlnDZQ0Tiq -s6GQCPZbvxTOfQgtgVIQgHph/j+AuYOGGZDrGTyjAe3jeZyaAPL9mAPvKN0v+egr -eJ7dmFnjojFPYIY1JGQ++d3X4AGG/08HnxagRwEsaKdpF+w2ou70cW1TupdSOg7Q -6as3IlWRmPJLExz75MLKcu6jdX6i9b0LkuSZ81PVmXcnsCFaia2acbNytLyAMtOI -ggFvlhaaOueElE6G4LgfB57Qler3ipEO3D8Phl6bRBob3nZzmMSEJjICvgMWS6A2 -y0klebEHZQ7nAuSQRA73/YKty5HuwKK5Q+u7n+kud/nYuB15bCC7HsD9AnXUZWY5 -Wuv5r3H/TDmmqcACbFLjjgBQk6xhqcJ4n8SiN+wLhCSRudPhuwmNppHqC3akHwzH -b235z+FnKmTcbODzqJdiUDgSR12doVX6vMVhYm8rSWms99gou2dcAEtN/VhNXh7U -ZpYY3fjcp+32aSu/C+8TT6IBnUZuHvHiE+WbRf04xCzkOs9TxAz4v/8op9oirgln -cPnpxYY87jlMcH6P/fKzT3qZlzpCYfMAeTkFWkwtZ800nDhlqWVDqwaaPDiTskdo -MgbdeY9En07i3sQLCpLbTPUuJZAjJF4Xd6veORjRM+VAkTDHDZDNjW/sOYylh7W4 -8v//tJwAnpJkmeqmnfB/m72ezVup3LGLM1uCqk5F3JgHu8Nh7cK5Agl36fx9osva -fZARlXRGZHnLgIOsjTdxJhP6vTcQ5LQoObb8ZVmDwo24/6c+RuGDnYq14paDCqym -TXUoEXXVrNNgr7dlKnEULogAiEvL/KU8FN4e63TaxrE6vPBV2yjQTukEBS7tayBj -8rJzExkVX8DGJYQiE5KSkK3rLnJCtVC10w2JEpvS8MbMRH/pcRHDKWJ4K7g8Svk6 -S9157mRQTxwCUeJ1d5lJs8UZiyG491OJQpdDb9Nvk8+rl68CL9/+ecK5JDVCZEl6 -bz+GHeSNKpxlKVlJBnJ1EnGaoLhGxxeozCWWbSlfrg1MYXhfBGEaz4f0AcLLZUQK -6dDoJIEJgsr+z4IUfKp8/pBT85jHoVJjynpIpJO5ry6QmKevONKpCxIwZIF0hulW -B7DF5jLNXyxoK3rYMm3h6TyPFjMRpFqOWRiikanwqUkr/Wla/pVCg1klxijXYqlF -ppu+uk2u//3IjJTsL7z+3qHAzkz7YNvU9sG/jnW0cdVPKxgz3I8KwpDB5iTWBUs3 -S80gfR0KvyPC/OCORhkA4uEYuEQbLeS7EjHDkqGHvJ9Pbn0zbsMyQXThcVyO3vmn -ZvpvTyIXFoKmEsRHNisDv2+1CBLtUz9jguQp7p/Ivw7iUaTIJnXqFghSj6uX0QIo -9Kd90aKBSMpIsIPEq7QWaaqg7KptvE1iGV82OXFTk85yqiaaciLl48tTFBIhY0E6 
-Hs3dDe8E+dEAkJsw+E77TdUSLxII3aMPVAfcHZ6EzCVTpYalGAcI+NQFF929WTNH -lp62ZXwm7dibcSs4IkGt8+H9nkDWI1Q0Rm7ssBwxY8oU2B+GAqCWVGFb7zvU6cO0 -bzNypUiHBAdRBLOL97DRE957pmo59guaUA4NwZ528BpWbhLm9tVr13QUYhkirB8r -VYYHYsjZt2PY2+EdG4jcX3tJ15KVL4Q51a7Xpiad6dKZOntHY6AUupnx3OFGIOHE -Hojn0BwNRdyJMOnAXU4ItrDtOFtDBZb3N311uZPMHbz9xt9UMXCKC8cRF6ThITOz -osALdrnkHTNCABIxpd/Lks8pStif7/wU7pHKEO1K0AP+JNS1ZEHsxUEmUFcqr1or -mopzC/nYcEdzE3GXYfdboA+zT9lRDc/2nIM2A7EAU8Nb4x6Th4P/bhG/psb/WXmv -mH7P8X4dYwHqF1jOM5sYLybVUgzhBEzlsnrL3ueu2Qee/2FegJnAqzoHVaGUYx9w -K2OZ02lVF1MyECYVwO1FhG2L8UgUIfdBrecDMnIYTmRmmHAHC8jcrvhPAMnw8wJR -VuQ73QMpOLpSefj1jpJHwk7Co4kEN4I/VSXt+RwRARbb39jJOGOvo5XiJGg2+sAm -m/RSjkK0NRksoTyB+rnVtS8LbOIT+HAyE6mJBGWM88lJiWcVZj0zT3OXZeN3rcwl -HO0AVuaXLmoiv35QB38t5ACxcNgP39rjqiA/HwOMXasPDnhegITXW1a1AykmQKxW -Fdr9m0f6+iFTZMBL1hT9ucBIKSHv7uTOR8gMR4CC+VLhaBp3JEOjkdsxEuyJGf6L -R/AdoGjmyI7nlm/UI8zPYDW5eY4zaNMRR/R3ahB+s1hqZZhk13vuvDhgB+tqt3Be -ni2vzphiSe/5aXCP5tA+gQ+eQ+mS08tFWqETQXTZ9ppywyli8Bg/5EAmpllE5Yt/ -tF5fhBmsTJn+7OWASQajpNnhDrTeuQE3+055f0Fuceqivu7w2OEioOSFBCASVGjJ -lw5boZ/00YMDL9069qNTzvQF1Lhaw3JbLCdxRzRdGkAkL/cvF8kP5YRr1Ez8dELr -eGI3e+gX9wi/ZrhBwlPa4Gzg24Kk0H8Cx176L3/OsD/xKvFyI8rXkylgOAAMsOjU -GMsBUcclRhz8lAMQzlLnUCyBzHVcIvmW1tosz61+FTYl+PsLa6vmQAUnTu5nL2qs -yaneyN0jXqxAEV1dwPTpelAKG50J54OCEdtLeYRufPtYffTekawLDb/Mvd21OywZ -SlQT+4vlcVVa9rBYXEArOjSSPyAnHl4yj3k7bqxdDOX6TfG00m7rOnEVZui2PZIM -AQRCxLINtm3meZ4LrYr7VM0/zoMh2cpxP0CIiSSv+dxM7HOKiFEJTdH0B5VqnYne -nMRjOq/vpOtsrBU+aTZA83AqlKhAyhh7Y5fMg0ELjb3Tf44iuhj2WYdvSjJa2UzE -PqCvT+KnyAaAcKnCltXMYcFIZvpnlrkkc8Uy67kqkblH1rnUUjTuKZOBXE2Ks7zP -tmvsVqK6ONfUH1jTJYcuxHADOswewXi2r/ZbypPZtplw5tVxNTuJsjGKrkR2rsXF -hu6hCC1DwXvyrwkcyNgoNsu8+NirtxlYJNuTHcDdw0ZPGw4fJO6uDFWv0csX8E5q -/sTodrI0dQLijidtYku8eCeqnUmYBXjVexheqrTnuelIV9TQJQzG1iXeZUphNWgN -ss+NQI5ht4KeWgqDM8bQJG48921O/rR7zy4dcsxn4PuJQ/fM1CWYBpfXGEnB7msW -bSFlsr8IgFukjKkOTqSiMmHBaur3/y8pyWtrJNGJdjqUjgiFr3nOc0uT8XWNThRf -r7eTTANk+8OvS8K82LcpmXMzaUkJ7rtgqfg6+cyMDtqQalEh7ggFqhRI6Kv0n+4h 
-eI8gX+McyUJo47f3ew9qg0ccO/zadKn6WdTNyzkPDXxy8a1UA/8xZArCTVjhAoRI -gkQem2x3clbdBnUsByBMl7rzIgOhdCqx+dC2ghI8ummmJLI99Kdhq3vy8qlbLA4o -zauw/b96C/GPrGVkOxHDGkhc5MPEadWiJZNuouqt3Qx42lgy97RWRBsjBrgk60F8 -XxlUXQ8bmI/aBe4bBXLDqZEiALgowYWCvS1JkMpmjSl+vcCzXL7XypGqek1ZFrQJ -D3hw+JnDseg/8xsle6RUR9MjrGZZXsgTQ8ULAmI6PC8K7WCJGvpHSIw5FXqGDuJM -v3/yRRWHDDLIwKC+3doaIAVD07ww53URJ7lweZk4fjTLIkRz6U7Q9fEl6hZVH6wU -fUqRQecAXcdc+2ZUKmmo1+yp1RHLuo3GM25bEg2TcNQkuwcfCgSlGprXQEOOScH/ -xj8fzxi+l60xgPNdg+TcbAECfpXbroVNkv3rw+axN5TDmaYa3pNDbiGykDpp+u2S -4ussqfHGXTt3SeXnDlRdqyZp9QNbn7cCfAivqZyR0CzFMOpIZf+6u9aIGpAdQVcS -eop6iEnbUeEP5/+EmcEulIlvqh+xKk5ze2YUqE6W4psxozpcVdv/B4hlOfmVmMBf -IcWUE4qyea0Gcpg2BcgovgmYMomUN+QTCRUYCB0l69k8gwjrUJ1H+yvxGhoZdwkO -7PCYujnZF4MZOqN6FSsT2MrUwlX9W15KljHspARPuL7gcAz9qmEAJg== diff --git a/scripts/hosts.env b/scripts/hosts.env new file mode 100644 index 000000000..8351ba350 --- /dev/null +++ b/scripts/hosts.env @@ -0,0 +1,42 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Source the envrionment file +# if [ ! -f "info.env" ]; then +# . 
./info.env +# else # check if the env varas instead have been exported +# if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then +# echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported." +# echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env." +# exit 1 +# fi +# fi + +host_list_str="ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ +"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ +"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} " diff --git a/scripts/info.env.axx b/scripts/info.env.axx new file mode 100755 index 000000000..b2e6308fb --- /dev/null +++ b/scripts/info.env.axx @@ -0,0 +1,15 @@ +U2FsdGVkX18VoSEji2kkFCFNcDHC1mzJ+hUulvTheU5dL9E/lmDWS6qdk8R1VCPJ +WyRU1Zefxvc1fw/sqvmzliUgBXXN6dOgRv73+ap8vyp+IvUhCVAZl9efFXHZ2Eag +6loROID0Qq28Bd+5Btqk/JuC6az9QvnV1E4MRhmZBtCIJ8P/joXKIigR9KHGvL0N +7PpA20UxvMzSH/vQSFd0zkuuvjAAzxN/AVO3W0Jbw1fmHy0gqp4TxidqXF0JatdC +YtDadHqyGHCid3hDP0+GwS4yCSEL/uNEE1e3Npe5EF52YB1OE5y7WqJFmQT1OdNd +pkpPok73YNyPtetMBzIr6t3BcnXHL1j38lrDcMZvBy9RWQ2LQiSxmRiGanEg+i9L +SBapVYDJJ64eKZd7T7gY4gViytT0/i6IAqgGqoH0Dk9LQnGmQ7bOqi34zOna/iC2 
+PFzx8XFZF/BmXQm3/96xJsRv15IMKCRp2t9lha0N/FRVmEYp7n5loi6oj5hCtD5k +CV1nbzO9cvMH1c85LUeWjTfcEmTA0criSCiBY3zLywrBvs6XsV6EkITMjPh1K2ht +AHXVPykPHhG6+F0LPYS4gasc0jLRTCxVyPRrl3tSf5aGvvo7ilsZrUtVh2UKUkuN +bjpUHCsrsV17LZUb5fWbY3B0EB1NxHa2rO3cb0ausUd+Mf+02SlnPsnaxjX7lTna +ymUlYs6oQcfAfhHM1mCf8miS4ES2HBdl9Urk9BiepSRJudoaBjIL/L9IsaInYpdv +BfW04gocwKJOhhGUE5ql4+DBfoCrWbz4bIGlUSfEIdFiRmsHG8723JQXgq19c4il +oFe7inTT14QHNsI7JNWmDDxsBPkItgJJ00JR+WwZd77jDTHJhlXuf8lYevQCRKla +BDZ3DlqvbK2ILBWFz6XTjPdlNu2fYsxlW4R5kLKsTyI= diff --git a/scripts/mount-shr.sh b/scripts/mount-shr.sh deleted file mode 100755 index 7b2048aaa..000000000 --- a/scripts/mount-shr.sh +++ /dev/null @@ -1,92 +0,0 @@ -# ============================================================================== -# Copyright (c) IBM Corporation 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -# ============================================================================== -# Mount data sets to USS mounts -# ============================================================================== - -set -A mount_list "/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ -"/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ -"/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ -"/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ -"/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ -"/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ -"/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ -"/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ -"/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ -"/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ -"/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ -"/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ -"/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ -"/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ -"/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ -"/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ -"/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ -"/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ -"/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ -"/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" \ -"/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ -"/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ -"/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ -"/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ -"/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ -"/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" - -mount(){ - unset path - unset data_set - for tgt in "${mount_list[@]}" ; do - # TODO: Can use something like the below to find ouf a mount is in place and act on that - # df /zoau/v1.0.0-ga | tail -n +2 |cut -d " " -f 2 | sed 's/(//' | sed 's/.$//' - path=`echo "${tgt}" | cut -d ":" -f 1` - data_set=`echo "${tgt}" | cut -d ":" -f 2` - mkdir -p ${path} - echo "Mouting data set ${data_set} to ${path}." 
- /usr/sbin/mount -r -t zfs -f ${data_set} ${path} - done -} - -unmount(){ - unset path - unset data_set - for tgt in "${mount_list[@]}" ; do - path=`echo "${tgt}" | cut -d ":" -f 1` - data_set=`echo "${tgt}" | cut -d ":" -f 2` - echo "Unmounting data set ${data_set} from ${path}." - /usr/sbin/unmount ${path} - done -} - -usage () { - echo "" - echo "Usage: $0 --mount, --unmount" - echo " $0 --mount" - echo "Choices:" - echo " - mount: will create paths and mount data sets." - echo " - unmount: will unmount data sets from paths." -} - -################################################################################ -# Main arg parse -################################################################################ -case "$1" in ---mount) - mount - ;; ---unmount) - unmount - ;; -*) - usage - ;; -esac diff --git a/scripts/mounts.env b/scripts/mounts.env new file mode 100644 index 000000000..8f944d971 --- /dev/null +++ b/scripts/mounts.env @@ -0,0 +1,75 @@ +# ============================================================================== +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# ============================================================================== +# KSH (Korn Shell) Array of mounts index delimited by " ", entries delimited by ":" +# More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm +# This `mounts.env` is sourced by several other files, only these lists needs to +# be maintained. +# ============================================================================== + +# ------------------------------------------------------------------------------ +# ZOAU MOUNT TABLE +# spec: zoau_mount_list[0]="<index>:<version>:<mount>:<data_set><space>" +# example: zoau_mount_list[0]="1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS " +# Format: +# index - used by the generated profile so a user can select an option +# version - describes the option a user can select +# mount - the mount point path the data set will be mounted to +# data_set - the z/OS data set containing the binaries to mount +# space - must be a space before the closing quote +# ------------------------------------------------------------------------------ +zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ +"2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS "\ +"3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ +"6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS "\ +"7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS "\ +"8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ +"9:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ +"10:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ +"11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ +"12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ +"13:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " + +# ------------------------------------------------------------------------------ +# PYTHON MOUNT TABLE +# spec: python_mount_list[0]="<index>:<version>:<home>:<mount>:<data_set><space>" +# 
example: python_mount_list[0]="/python2:IMSTESTU.PYZ.ROCKET.V362B.ZFS " +# Format: +# mount - the mount point path the data set will be mounted to +# data_set - the z/OS data set containing the binaries to mount +# space - must be a space before the closing quote +# Mismarked: "/allpython/3.8.5:IMSTESTU.PYZ.V380.GA.ZFS "\ +# ------------------------------------------------------------------------------ +python_mount_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz:/allpython/3.8.2:IMSTESTU.PYZ.ROCKET.V362B.ZFS "\ +"2:3.8.3:/allpython/3.8.3/usr/lpp/IBM/cyp/v3r8/pyz:/allpython/3.8.3:IMSTESTU.PYZ.V383PLUS.ZFS "\ +"3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz:/allpython/3.9:IMSTESTU.PYZ.V39016.ZFS "\ +"4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz:/allpython/3.10:IMSTESTU.PYZ.V3A09.ZFS "\ +"5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11:IMSTESTU.PYZ.V3B02.ZFS "\ +"6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS " + +# ------------------------------------------------------------------------------ +# PYTHON PATH POINTS +# spec: python_path_list[0]="<index>:<version>:<path><space>" +# example: python_path_list[0]="1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz " +# Format: +# index - used by the generated profile so a user can select an option +# version - describes the option a user can select +# path - the path where a particular python can be found +# space - must be a space before the closing quote +# ------------------------------------------------------------------------------ +python_path_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz "\ +"2:3.8.3:/allpython/3.8.3/usr/lpp/IBM/cyp/v3r8/pyz "\ +"3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz "\ +"4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz "\ +"5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz "\ +"6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file diff --git a/scripts/mounts.sh 
b/scripts/mounts.sh old mode 100644 new mode 100755 index 0fcfecb38..7ce7252ca --- a/scripts/mounts.sh +++ b/scripts/mounts.sh @@ -1,70 +1,632 @@ - # ============================================================================== - # Copyright (c) IBM Corporation 2023 - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # ============================================================================== - - # ============================================================================== - # KSH (Korn Shell) Array of mounts index delimited by " ", etries delimited by ":" - # More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm - # This `mounts.sh` is sourced by serveral other files, only these lists needs to - # be maintained. 
- # ============================================================================== - - # ------------------------------------------------------------------------------ - # zoau_mount_list[0]="<index>:<version>:<mount>:<data_set>" - # e.g: zoau_mount_list[0]="1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" - # Format: - # index - used by the generated profile so a user can select an option - # version - describes the option a user can select - # mount - the mount point path the data set will be mounted to - # data_set - the z/OS data set containing the binaries to mount - # ------------------------------------------------------------------------------ - set -A zoau_mount_list "1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ - "2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ - "3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ - "4:1.0.1-ptf1:/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ - "5:1.0.1-ptf2:/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ - "6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ - "7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ - "8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ - "9:1.1.0-spr:/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ - "10:1.1.0-spr5:/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ - "11:1.1.0-spr7:/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ - "12:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ - "13:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ - "14:1.2.0f:/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ - "15:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ - "16:1.2.1-rc1:/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ - "17:1.2.1g:/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ - "18:1.2.1h:/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ - "19:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ - "20:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" - - # ------------------------------------------------------------------------------ - # 
python_mount_list[0]="<mount>:<data_set>" - # python_mount_list[0]="/python2:IMSTESTU.PYZ.ROCKET.V362B.ZFS" - # ------------------------------------------------------------------------------ - set -A python_mount_list "/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ - "/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ - "/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ - "/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ - "/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ - "/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" - - # ------------------------------------------------------------------------------ - # python_path_list[0]="<index>:<version>:<path>" - # python_path_list[0]="1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" - # ------------------------------------------------------------------------------ - set -A python_path_list "1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" \ - "2:3.9:/python2/usr/lpp/IBM/cyp/v3r9/pyz" \ - "3:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz" \ - "4:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz" +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Description: +# TODO... 
+# Maintain: +# zoau_mount_list_str - zoau mount points +# python_mount_list_str - python mount points +# python_path_list_str - python executable paths +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# Globals +# ------------------------------------------------------------------------------ +cd $(dirname $0) + +# Current shell, bash returns 'bash' +CURR_SHELL=`echo $$ $SHELL | cut -d " " -f 2 | sed 's|.*/||'` + +# System script is running on at the momement +SYSTEM=`uname` + +# Array where each entry is: "<index>:<version>:<mount>:<data_set>" +ZOAU_MOUNTS="" + +# Array where each entry is: "<mount>:<data_set>" +PYTHON_MOUNTS="" + +# Array where each entry is: "<index>:<version>:<path>" +PYTHON_MOUNT_PATHS="" + +# ZOAU matching an ZOAU ID (first column in mount table) +ZOAU_HOME="" + +# PYZ matching an PYZ ID (first column in mount table) +PYZ_HOME="" + +# Cosmetic divider +DIV="-----------------------------------------------------------------------" + +# Supporting bash will take added testing, the port on z/OS has enough +# differences to warrnat temporarily disabliing the function on z/OS. More +# specifically, when using `vi` in Bash, editing becomes a problem. +if [ "$CURR_SHELL" = "bash" ]; then + if [ "$SYSTEM" = "OS/390" ]; then + echo "Script $0 can not run in 'bash', please execute in another shell." + exit 1 + fi +fi + +# ============================================================================== +# ********************* Helper functions ********************* +# ============================================================================== +message(){ + echo $DIV; + echo "$1"; + echo $DIV; +} + +# ------------------------------------------------------------------------------ +# Private function that initializes an array ($1) from a properly delimited +# string. 
Array types supported are either Korn Shell (ksh) (more precisely, +# ksh88 and ksh93 variants) or Bash style. +# More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm +# Other shells may need to be supported in the future. +# GLOBAL: See arguments $1 +# ARGUMENTS: +# - $1 (variable) a global var that will be unset and initialized as an array +# - $2 (string) a string delimited by spaces (' ') and entries delimited by a +# colon (':'). This string is used to create set an array. +# OUTPUTS: None +# RETURN: None +# USAGE: _set_shell_array <var> <string> +# ------------------------------------------------------------------------------ +_set_shell_array(){ + # Notes: + # ksh is hard to detect on z/OS, for now comparing to `sh` works else we can + # add in the results for `echo $PS1; echo $PS2; echo $PS3; echo $PS4` + # which returns in this order ('#', '>', '#?', '+') to detect `sh` + unset $1 + if [ "$CURR_SHELL" = "sh" ]; then + # set -A $1 "${@:2}" # parens `{` don't work in z/OS ksh, work on mac + set -A $1 $2 + else + #eval $1='("${@:2}")' + eval $1='(${@:2})' + fi +} + +# ------------------------------------------------------------------------------ +# Source scripts needed by this script. +# ------------------------------------------------------------------------------ + +if [ -f "mounts.env" ]; then + . ./mounts.env +else + echo "Unable to source file: 'mounts.env', exiting." + exit 1 +fi + +# ------------------------------------------------------------------------------ +# Private function that initializes a variable as an global array for either +# Korn Shell (ksh) or other shells where other at this point is following +# bash style arrays. Other shells may need to be supported in the future. 
+# +# GLOBAL: See arguments $1 +# ARGUMENTS: +# - $1 (variable) a global var that will be unset and initialized as an array +# - $2 (string) a string delimited by spaces used to create a global array +# OUTPUTS: None +# RETURN: None +# USAGE: _set_shell_array <var> <string> +# ------------------------------------------------------------------------------ +# _set_shell_array(){ +# # ksh is hard to detect, for now comparing to `sh` works else we can +# # add in the results for `echo $PS1; echo $PS2; echo $PS3; echo $PS4` +# # which returns in this order ('#', '>', '#?', '+') to detect `sh` +# if [ "$CURR_SHELL" = "sh" ]; then +# # set -A $1 "${@:2}" # parens `{` don't work in z/OS ksh, work on mac +# set -A $1 $2 +# else +# eval $1='(${@:2})' +# fi +# } + +# ------------------------------------------------------------------------------ +# Normalize the array for the shell use, creates an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. +# Creats a normalized array `PYTHON_MOUNTS`, `ZOAU_MOUNTS` +# ------------------------------------------------------------------------------ +# set_python_to_array(){ +# unset PYTHON_MOUNTS +# _set_shell_array PYTHON_MOUNTS "$(echo $python_mount_list_str)" +# } + +# set_zoau_to_array(){ +# unset ZOAU_MOUNTS +# _set_shell_array ZOAU_MOUNTS "$(echo $zoau_mount_list_str)" +# } + +# ------------------------------------------------------------------------------ +# Normalize an array for the shell use, create an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. Initializes a global array `ZOAU_MOUNTS` where each +# index contains a clolon `:` delimited values about ZOAU mounts. 
For example +# ZOAU_MOUNTS[0] has in it <index>:<version>:<mount>:<data_set> where that may +# look like "1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS", see sourced script +# `mounts.env` for more information. +# GLOBAL: ZOAU_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_zoau_mounts +# ------------------------------------------------------------------------------ +set_zoau_mounts(){ + unset ZOAU_MOUNTS + _set_shell_array ZOAU_MOUNTS "$(echo $zoau_mount_list_str)" +} + +# ------------------------------------------------------------------------------ +# Normalize an array for the shell use, create an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. Initializes a global array `PYTHON_MOUNTS` where each +# index contains clolon `:` delimited values about PYTHON mounts. For example +# PYTHON_MOUNTS[0] has in it <index>:<version>:<mount>:<data_set> where that may +# look like "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10:IMSTESTU.PYZ.V3A0.ZFS ", +# see sourced script `mounts.env` for more information. +# GLOBAL: PYTHON_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_python_mounts +# ------------------------------------------------------------------------------ +set_python_mounts(){ + unset PYTHON_MOUNTS + _set_shell_array PYTHON_MOUNTS "$(echo $python_mount_list_str)" +} + +# ------------------------------------------------------------------------------ +# Normalize an array for the shell use, create an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. Initializes a global array `PYTHON_MOUNT_PATHS` where each +# index contains clolon `:` delimited values about PYTHON paths. 
For example +# PYTHON_MOUNT_PATHS[0] has in it <index>:<version>:<path><space> where that may +# look like "1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz ", +# see sourced script `mounts.env` for more information. +# GLOBAL: +# ZOAU_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_python_mount_paths +# ------------------------------------------------------------------------------ +set_python_mount_paths(){ + unset PYTHON_MOUNT_PATHS + _set_shell_array PYTHON_MOUNT_PATHS "$(echo $python_path_list_str)" +} + +# ============================================================================== +# ********************* Mount functions ********************* +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Mount all data sets in the sourced mount table, check if the entries are +# already mounted, compare that to the data set being mounted, if they don't +# match, umount and mount the correct one else skip over it. 
+# +# GLOBAL: See arguments $1 +# ARGUMENTS: +# - $1 (variable) a global var that will be unset and initialized as an array +# - $2 (string) a string delimited by spaces used to create a global array +# OUTPUTS: None +# RETURN: None +# USAGE: _set_shell_array <var> <string> +# ------------------------------------------------------------------------------ +mount(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + + # Call helper script to have ZOAU_MOUNTS generated + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + # zoau_mounted_data_set can be empty so perform added validation + zoau_mounted_data_set=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + + # If zoau_mounted_data_set is empty or does not match expected, it means we should perform the mount + if [ "$zoau_mounted_data_set" != "$zoau_data_set" ]; then + echo "Mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." + + # If zoau_mounted_data_set not empty, compare the mount points and if they match, then unmount. + # Note, the mount point could be root (/) waitng for children so lets compare before unmounting. + if [ ! -z "${zoau_mounted_data_set}" ]; then + temp_mount=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 1` + if [ "${zoau_mount}" = "${temp_mount}" ]; then + /usr/sbin/unmount ${zoau_mount} + fi + fi + mkdir -p ${zoau_mount} + /usr/sbin/mount ${1} ${zoau_data_set} ${zoau_mount} + else + echo "ZOAU ${zoau_version} is already mounted on data set ${zoau_data_set} to path ${zoau_mount}." 
+ fi + done + + unset python_mount + unset python_data_set + # Call helper script to have PYTHON_MOUNTS generated + set_python_mounts + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + python_mount=`echo "${tgt}" | cut -d ":" -f 4` + python_data_set=`echo "${tgt}" | cut -d ":" -f 5` + + # python_mounted_data_set can be empty so perform added validation + python_mounted_data_set=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + + # If python_mounted_data_set is empty or not, we will perform a mount + if [ "$python_mounted_data_set" != "$python_data_set" ]; then + echo "Mouting Python ${python_mount} on data set ${python_data_set}." + + # If python_mounted_data_set not empty, compare the mount points and if they match, then unmount. + # Note, the mount point could be root (/) waitng for children so lets compare before unmounting. + if [ ! -z "${python_mounted_data_set}" ]; then + temp_mount=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 1` + if [ "${python_mount}" = "${temp_mount}" ]; then + /usr/sbin/unmount ${python_mount} + fi + fi + + mkdir -p ${python_mount} + /usr/sbin/mount ${1} ${python_data_set} ${python_mount} + else + echo "Python ${python_mount} is already mounted on data set ${python_data_set}." + fi + done +} + +# ------------------------------------------------------------------------------ +# Unmount all data sets in the sourced mount table. 
+# ------------------------------------------------------------------------------
+unmount(){
+    unset zoau_index
+    unset zoau_version
+    unset zoau_mount
+    unset zoau_data_set
+    # Call helper script to have ZOAU_MOUNTS generated
+    set_zoau_mounts
+    for tgt in "${ZOAU_MOUNTS[@]}" ; do
+        zoau_index=`echo "${tgt}" | cut -d ":" -f 1`
+        zoau_version=`echo "${tgt}" | cut -d ":" -f 2`
+        zoau_mount=`echo "${tgt}" | cut -d ":" -f 3`
+        zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4`
+
+        zoau_mounted_data_set=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'`
+        if [ "$zoau_mounted_data_set" = "$zoau_data_set" ]; then
+            echo "Unmounting ZOAU ${zoau_version} on data set ${zoau_data_set} from path ${zoau_mount}."
+            /usr/sbin/unmount ${zoau_mount}
+        else
+            echo "ZOAU ${zoau_version} is not currently mounted on data set ${zoau_data_set} to path ${zoau_mount}."
+        fi
+    done
+
+    unset python_mount
+    unset python_data_set
+    # Call helper script to have PYTHON_MOUNTS generated
+    set_python_mounts
+    for tgt in "${PYTHON_MOUNTS[@]}" ; do
+        python_index=`echo "${tgt}" | cut -d ":" -f 1`
+        python_version=`echo "${tgt}" | cut -d ":" -f 2`
+        python_home=`echo "${tgt}" | cut -d ":" -f 3`
+        python_mount=`echo "${tgt}" | cut -d ":" -f 4`
+        python_data_set=`echo "${tgt}" | cut -d ":" -f 5`
+
+        python_mounted_data_set=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'`
+        if [ "$python_mounted_data_set" = "$python_data_set" ]; then
+            echo "Unmounting Python ${python_mount} on data set ${python_data_set}."
+            /usr/sbin/unmount ${python_mount}
+        else
+            echo "Python ${python_mount} is not currently mounted on data set ${python_data_set}."
+        fi
+    done
+}
+
+# ------------------------------------------------------------------------------
+# Remount all data sets sourced in the mount table, check if there is something
+# already mounted, compare that to the data set being mounted, if they don't
+# match, umount and mount the correct one else skip over it.
+# ------------------------------------------------------------------------------
+remount(){
+    unset zoau_index
+    unset zoau_version
+    unset zoau_mount
+    unset zoau_data_set
+    # Call helper script to have ZOAU_MOUNTS generated
+    set_zoau_mounts
+    for tgt in "${ZOAU_MOUNTS[@]}" ; do
+        zoau_index=`echo "${tgt}" | cut -d ":" -f 1`
+        zoau_version=`echo "${tgt}" | cut -d ":" -f 2`
+        zoau_mount=`echo "${tgt}" | cut -d ":" -f 3`
+        zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4`
+
+        zoau_mounted_data_set=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'`
+        # ZOAU is not mounted, perform mount
+        if [ ! -n "$zoau_mounted_data_set" ]; then
+            echo "Nothing to unmount, mounting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}."
+            mkdir -p ${zoau_mount}
+            /usr/sbin/mount -r -t zfs -f ${zoau_data_set} ${zoau_mount}
+        # ZOAU is currently mounted and matches what we expect
+        elif [ "$zoau_mounted_data_set" = "$zoau_data_set" ]; then
+            echo "Unmounting ZOAU ${zoau_version} from path ${zoau_mount} on data set ${zoau_data_set}."
+            /usr/sbin/unmount ${zoau_mount}
+            echo "Mounting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}."
+            mkdir -p ${zoau_mount}
+            /usr/sbin/mount -r -t zfs -f ${zoau_data_set} ${zoau_mount}
+        # What is mounted does not match our expected value, perform unmount and mount
+        elif [ "$zoau_mounted_data_set" != "$zoau_data_set" ]; then
+            echo "WARNING: Overriding existing mount ${zoau_mount}."
+            echo "Unmounting data set ${zoau_mounted_data_set} from path ${zoau_mount}."
+            /usr/sbin/unmount ${zoau_mount}
+            echo "Mounting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}."
+            mkdir -p ${zoau_mount}
+            /usr/sbin/mount -r -t zfs -f ${zoau_data_set} ${zoau_mount}
+        else
+            echo "Unable to determine the existing mounts to remount."
+        fi
+    done
+
+    unset python_mount
+    unset python_data_set
+    # Call helper script to have PYTHON_MOUNTS generated
+    set_python_mounts
+    for tgt in "${PYTHON_MOUNTS[@]}" ; do
+        python_index=`echo "${tgt}" | cut -d ":" -f 1`
+        python_version=`echo "${tgt}" | cut -d ":" -f 2`
+        python_home=`echo "${tgt}" | cut -d ":" -f 3`
+        python_mount=`echo "${tgt}" | cut -d ":" -f 4`
+        python_data_set=`echo "${tgt}" | cut -d ":" -f 5`
+
+        python_mounted_data_set=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'`
+        # Python is not mounted, perform mount
+        if [ ! -n "$python_mounted_data_set" ]; then
+            echo "Nothing to unmount, mounting Python ${python_version} on data set ${python_data_set} to path ${python_mount}."
+            mkdir -p ${python_mount}
+            /usr/sbin/mount -r -t zfs -f ${python_data_set} ${python_mount}
+        #Python is currently mounted and matches what we expect
+        elif [ "$python_mounted_data_set" = "$python_data_set" ]; then
+            echo "Unmounting Python ${python_version} from path ${python_mount} on data set ${python_data_set}."
+            /usr/sbin/unmount ${python_mount}
+            echo "Mounting Python ${python_version} on data set ${python_data_set} to path ${python_mount}."
+            mkdir -p ${python_mount}
+            /usr/sbin/mount -r -t zfs -f ${python_data_set} ${python_mount}
+        # What is mounted does not match our expected value, perform unmount and mount
+        elif [ "$python_mounted_data_set" != "$python_data_set" ]; then
+            echo "WARNING: Overriding existing mount ${python_mount}."
+            echo "Unmounting data set ${python_mounted_data_set} from path ${python_mount}."
+ /usr/sbin/unmount ${python_mount} + echo "Mouting Python ${python_version} on data set ${python_data_set} to path ${python_mount}." + mkdir -p ${python_mount} + /usr/sbin/mount -r -t zfs -f ${python_data_set} ${python_mount} + else + echo "Unable to determine the existing mounts to remount." + fi + done +} + + +# ============================================================================== +# ********************* Getter functions ********************* +# ============================================================================== + +get_python_mount(){ + + arg=$1 + unset PYZ_HOME + unset python_version + unset python_home + + # Set PYZ mount table to shell array types + set_python_mounts + + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + + if [ "$arg" = "$python_version" ]; then + PYZ_HOME=$python_home + fi + + done + + if [ ! "$PYZ_HOME" ]; then + echo "PYZ vesion [$arg] was not found in the mount table." + exit 1 + fi +} + + +# Get the zoau home/path given $1/arg else error +get_zoau_mount(){ + arg=$1 + unset ZOAU_HOME + unset zoau_version + unset zoau_mount + + # Set ZOAU mount table to shell array types + set_zoau_mounts + + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + + if [ "$arg" = "$zoau_version" ]; then + ZOAU_HOME=$zoau_mount + fi + + done + + if [ ! "$ZOAU_HOME" ]; then + echo "ZOAU vesion [$arg] was not found in the mount table." 
+ exit 1 + fi +} + +# ============================================================================== +# ********************* Print functions ********************* +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Print python and zoau mount tables +# ------------------------------------------------------------------------------ +print_mount_tables(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + + set_zoau_mounts + + message "Displaying z/OS Python ZOAU table." + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + echo "ID:" $zoau_index + echo " Version:" $zoau_version + echo " Home:" $zoau_mount + echo " Mount:" $zoau_data_set + + done + + unset python_index + unset python_version + unset python_home + unset python_mount + unset python_data_set + + set_python_mounts + + message "Displaying z/OS Python mount table." 
+ for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + python_mount=`echo "${tgt}" | cut -d ":" -f 4` + python_data_set=`echo "${tgt}" | cut -d ":" -f 5` + + echo "ID:" $python_index + echo " Version:" $python_version + echo " Home:" $python_home + echo " Mount:" $python_mount + echo " Data Set:" $python_data_set + done + + unset python_index + unset python_version + unset python_path + set_python_mount_paths + message "Displaying z/OS Python path for 'pyz'" + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_path=`echo "${tgt}" | cut -d ":" -f 3` + + echo "ID:" $python_index + echo " Version:" $python_version + echo " Path:" $python_path + done + +} + + +# ============================================================================== +# ********************* Test functions ********************* +# ============================================================================== + +# ============================================================================== +# Simple method to test arrays, test automation should be designed but this +# serves as a lightweight verification test +# GLOBAL: None +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: _test_arrays +# ============================================================================== +_test_arrays(){ + echo "Current shell is: $CURR_SHELL" + + set_zoau_mounts + echo "" + echo "All ZOAU mounts are:" + echo ${ZOAU_MOUNTS[@]} + echo "ZOAU mount 3 is:" + echo ${ZOAU_MOUNTS[3]} + + set_python_mounts + echo "" + echo "All Python mounts are:" + echo ${PYTHON_MOUNTS[@]} + echo "Python mount 3 is:" + echo ${PYTHON_MOUNTS[3]} + + set_python_mount_paths + echo "" + echo "All Python paths are:" + echo ${PYTHON_MOUNT_PATHS[@]} + echo "Python path 3:" + echo ${PYTHON_MOUNT_PATHS[3]} +} + 
+################################################################################ +# Main arg parser +################################################################################ +case "$1" in + --mount) + mount "-r -t zfs -f" + ;; + --mount-rw) + unmount + mount "-t zfs -f" + ;; + --unmount) + unmount + ;; + --remount) + remount + ;; + --print-mount-tables) + print_mount_tables + ;; + --perform-unit-test) + _test_arrays + ;; + --val) + get_zoau_mount "1.2.1" + get_python_mount "3.10" + echo $ZOAU_HOME + echo $PYZ_HOME + ;; + *) + # If $1 exists and the script matches to $0 because when sourced this would + # thrown error and the added check is to prevent the errors when sourced. + if [ -n "$1" ]; then + if [ "$0" = "mounts-datasets.sh" ]; then + echo "ERROR: unknown parameter $1 for script $0" + fi + fi +esac diff --git a/scripts/profile-shr b/scripts/profile-shr deleted file mode 100755 index c827f3037..000000000 --- a/scripts/profile-shr +++ /dev/null @@ -1,230 +0,0 @@ -#!/bin/sh -# ============================================================================== -# Copyright (c) IBM Corporation 2022, 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -################################################################################ -# Global vars -################################################################################ -zoau_version="" -python_version="" -python_path="" -bash_enabled=false - -################################################################################ -# zoau case match -################################################################################ -zoau_choice () { - case "$1" in - [a]* ) zoau_version="v1.2.0";; - [b]* ) zoau_version="v1.0.0-ga";; - [c]* ) zoau_version="v1.0.1-ga";; - [d]* ) zoau_version="v1.0.1-ptf1";; - [e]* ) zoau_version="v1.0.1-ptf2";; - [f]* ) zoau_version="v1.0.2-ga";; - [g]* ) zoau_version="v1.0.3-ga5";; - [h]* ) zoau_version="v1.0.3-ptf2";; - [i]* ) zoau_version="v1.1.0-spr";; - [j]* ) zoau_version="v1.1.0-spr5";; - [k]* ) zoau_version="v1.1.0-spr7";; - [l]* ) zoau_version="v1.1.0-ga";; - [m]* ) zoau_version="v1.1.1-ptf1";; - [n]* ) zoau_version="v1.2.0f";; - [o]* ) zoau_version="v1.2.1";; - [p]* ) zoau_version="v1.2.1-rc1";; - [q]* ) zoau_version="v1.2.1g";; - [r]* ) zoau_version="v1.2.1h";; - [s]* ) zoau_version="v1.2.2";; - [t]* ) zoau_version="latest";; - * ) echo "" - usage - ;; - esac -} - -################################################################################ -# zoau case match -################################################################################ -python_choice () { - case $1 in - [1]* ) python_version="3.8"; - python_path="/python3/usr/lpp/IBM/cyp/v3r8/pyz";; - [2]* ) python_version="3.9"; - python_path="/python2/usr/lpp/IBM/cyp/v3r9/pyz";; - [3]* ) python_version="3.10"; - python_path="/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz";; - [4]* ) python_version="3.11"; - python_path="/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz";; - *) echo "";usage;; - esac -} - -################################################################################ -# 
zoau case match -################################################################################ -bash_choice() { - case "$1" in - [b]* ) bash_enabled=true;; - * ) echo "";usage;; - esac -} -################################################################################ -# User input for Python -################################################################################ -usage () { - echo "" - echo "Usage: $0 [abcdefghijklmn] [123] b" - echo "ZOAU Choices:" - echo "\ta) ZOAU /zoau/v1.2.0" - echo "\tb) ZOAU /zoau/v1.0.0-ga" - echo "\tc) ZOAU /zoau/v1.0.1-ga" - echo "\td) ZOAU /zoau/v1.0.1-ptf1" - echo "\te) ZOAU /zoau/v1.0.1-ptf2" - echo "\tf) ZOAU /zoau/v1.0.2-ga" - echo "\tg) ZOAU /zoau/v1.0.3-ga5" - echo "\th) ZOAU /zoau/v1.0.3-ptf2" - echo "\ti) ZOAU /zoau/v1.1.0-spr" - echo "\tj) ZOAU /zoau/v1.1.0-spr5" - echo "\tk) ZOAU /zoau/v1.1.0-spr7" - echo "\tl) ZOAU /zoau/v1.1.0-ga" - echo "\tm) ZOAU /zoau/v1.1.1-ptf1" - echo "\tn) ZOAU /zoau/v1.2.0f" - echo "\to) ZOAU /zoau/v1.2.1" - echo "\tp) ZOAU /zoau/v1.2.1-rc1" - echo "\tq) ZOAU /zoau/v1.2.1g" - echo "\tr) ZOAU /zoau/v1.2.1h" - echo "\ts) ZOAU /zoau/v1.2.2" - echo "\tt) ZOAU /zoau/latest" - echo "" - echo "Python Choices:" - echo "\t1) Python 3.8" - echo "\t2) Python 3.9" - echo "\t3) Python 3.10" - echo "\t4) Python 3.11" - echo "" - echo "Bash shell" - echo "\tb) 'b' to enable bash shell" -} - -################################################################################ -# Message to user -################################################################################ -print_choices () { - echo "Using ZOAU version="$zoau_version - echo "Using python version="$python_version - echo "Bash = ${bash_enabled}" -} - -################################################################################ -# Configure all exports -################################################################################ -set_exports (){ - - export PATH=/bin:. 
- - ################################################################################ - # Set the ported tools directory on the EC, see the tools you can use, eg: - # vim, bash, etc - ################################################################################ - export TOOLS_DIR=/usr/lpp/rsusr/ported - export PATH=$PATH:$TOOLS_DIR/bin - - ################################################################################ - # Set the editor to VI - ################################################################################ - export TERM=xterm - - ################################################################################ - # Standard exports used in EBCDIC/ASCII conversion needed by tools like pyz/zoau - ################################################################################ - export _BPXK_AUTOCVT='ON' - export _CEE_RUNOPTS='FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)' - export _TAG_REDIR_ERR=txt - export _TAG_REDIR_IN=txt - export _TAG_REDIR_OUT=txt - export LANG=C - - ################################################################################ - # Set Java - ################################################################################ - export JAVA_HOME=/usr/lpp/java170/J7.0 - - ################################################################################ - # Configure Python - ################################################################################ - export PYTHON_HOME=$python_path - export PYTHON=$PYTHON_HOME/bin - export LIBPATH=$PYTHON_HOME/lib:$LIBPATH - - ################################################################################ - # ZOAU 1.0.2 or or earlier ueses ZOAU_ROOT and not ZOAU_HOME - ################################################################################ - export ZOAU_HOME=/zoau/${zoau_version} - export PATH=$ZOAU_HOME/bin:$PATH:$PYTHON:$JAVA_HOME/bin:$TOOLS_DIR/bin - export MANPATH=$MANPATH:$TOOLS_DIR/man - export ZOAU_ROOT=${ZOAU_HOME} - export PYTHONPATH=${ZOAU_HOME}/lib/:${PYTHONPATH} - export 
LIBPATH=${ZOAU_HOME}/lib:${LIBPATH} - - ################################################################################ - # Custom terminal configurations - ################################################################################ - # Append home directory to the current path - export PATH=$PATH:$HOME: - - # Set the prompt to display your login name & current directory - export PS1='[ $LOGNAME':'$PWD':' ]' - - ################################################################################ - # Run bash shell: - # I have have seen many issues using this version of bash to edit files on the - # EC, for example of you edit your .profile with VI under BASH, it will render - # unreable, for times I have to edit, I type exit it defaults be back into - # the zos_ssh shell which does not have any issues with VI or editing files. - # I generally use bash only for history and running commands. - ################################################################################ - if [ "{$bash_enabled}" = true ]; then - bash; - fi - - alias python="python3" - alias pip="pip3" -} -################################################################################ -# Main -################################################################################ -# User enters choices for zoau, python and bash -if [ $# -eq 3 ];then - zoau_choice $1 - python_choice $2 - bash_choice $3 - print_choices - set_exports - -# User enters choices for zoau and python, bash defaults to false -elif [ $# -eq 2 ];then - bash_enabled=false - zoau_choice $1 - python_choice $2 - print_choices - set_exports - -# User enters choice for zoau, python defaults to 3.8 and bash to false -elif [ $# -eq 1 ]; then - zoau_choice $1 - python_choice 1 - bash_enabled=false - print_choices - set_exports -else - usage -fi diff --git a/scripts/profile.sh b/scripts/profile.sh index 4a10fd3bd..a426ab868 100755 --- a/scripts/profile.sh +++ b/scripts/profile.sh @@ -18,17 +18,36 @@ # 
------------------------------------------------------------------------------ CURR_SHELL=`echo $0` +# Supporting bash will take added testing, the port on z/OS has enough +# differences to warrant temporarily disabling the function on z/OS. More +# specifically, when using `vi` in Bash, editing becomes a problem. if [ "$CURR_SHELL" = "bash" ]; then - # Have not found a good way to exit the bash shell without ending the profile - echo "This script can not run in a bash emulator, exiting bash and and thus"\ - "you must exit this profile again." - exit 1 + if [ "$SYSTEM" = "OS/390" ]; then + echo "Script $0 can not run in 'bash', please execute in another shell." + exit 1 + fi fi # ------------------------------------------------------------------------------ # Source the known mount points # ------------------------------------------------------------------------------ -. ./mounts.sh +mounts_env="mounts.env" + +if [ -f "$mounts_env" ]; then + . ./$mounts_env +else + echo "Unable to source file: $mounts_env, exiting." + exit 1 +fi + +mount_sh="mounts.sh" + +if [ -f "$mount_sh" ]; then + . ./$mount_sh +else + echo "Unable to source file: $mount_sh, exiting." + exit 1 +fi ################################################################################ # Global vars - since ksh is the default shell and local ksh vars are defined @@ -46,6 +65,14 @@ PYTHON_PATH="" BASH_SELECTED=false +# Array where each entry is: "<index>:<version>:<mount>:<data_set>" +ZOAU_MOUNTS="" + +# Array where each entry is: "<mount>:<data_set>" +PYTHON_MOUNTS="" + +# Array where each entry is: "<index>:<version>:<path>" +PYTHON_MOUNT_PATHS="" # ****************************************************************************** # Search the array `zoau_mount_list` for a matching arg, if it matches set the # global zoau_version var to the zoau version. 
@@ -58,7 +85,8 @@ get_option_zoau(){ unset zoau_version unset zoau_mount unset zoau_data_set - for tgt in "${zoau_mount_list[@]}" ; do + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do zoau_index=`echo "${tgt}" | cut -d ":" -f 1` zoau_version=`echo "${tgt}" | cut -d ":" -f 2` zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` @@ -79,7 +107,8 @@ get_option_python(){ unset python_index unset python_version unset python_path - for tgt in "${python_path_list[@]}" ; do + set_python_mount_paths + for tgt in "${PYTHON_MOUNT_PATHS[@]}" ; do python_index=`echo "${tgt}" | cut -d ":" -f 1` python_version=`echo "${tgt}" | cut -d ":" -f 2` python_path=`echo "${tgt}" | cut -d ":" -f 3` @@ -110,7 +139,8 @@ help_option_zoau(){ unset zoau_data_set echo "" echo "ZOAU Options:" - for tgt in "${zoau_mount_list[@]}" ; do + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do zoau_index=`echo "${tgt}" | cut -d ":" -f 1` zoau_version=`echo "${tgt}" | cut -d ":" -f 2` zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` @@ -123,8 +153,9 @@ help_option_python(){ unset python_index unset python_version unset python_path + set_python_mount_paths echo "Python Options:" - for tgt in "${python_path_list[@]}" ; do + for tgt in "${PYTHON_MOUNT_PATHS[@]}" ; do python_index=`echo "${tgt}" | cut -d ":" -f 1` python_version=`echo "${tgt}" | cut -d ":" -f 2` python_path=`echo "${tgt}" | cut -d ":" -f 3` @@ -151,8 +182,8 @@ usage () { # Message to user ################################################################################ selected_option () { - echo "Using ZOAU version `zoaversion`" - echo "Using python version `python --version`" + echo "Using ZOAU version $ZOAU_VERSION" + echo "Using python version $PYTHON_VERSION" if [ "${BASH_SELECTED}" = true ]; then echo "Bash is enabled." 
fi @@ -256,7 +287,7 @@ elif [ $# -eq 2 ];then set_bash # Default zoau 1.2.2 and python 3.9 elif [ $# -eq 0 ]; then - get_option_zoau 19 + get_option_zoau 12 get_option_python 2 get_option_shell false set_exports @@ -267,21 +298,3 @@ elif [ "$1" = help]; then else usage fi - - -# Source should have array mount_list -xxxx(){ - unset index - unset name - unset mount_point - unset data_set - for tgt in "${zoau_mount_list[@]}" ; do - index=`echo "${tgt}" | cut -d ":" -f 1` - name=`echo "${tgt}" | cut -d ":" -f 2` - mount_point=`echo "${tgt}" | cut -d ":" -f 3` - data_set=`echo "${tgt}" | cut -d ":" -f 4` - mkdir -p ${mount_point} - echo "Mouting ZOAU ${name} on data set ${data_set} to path ${mount_point}." - /usr/sbin/mount -r -t zfs -f ${data_set} ${mount_point} - done -} \ No newline at end of file diff --git a/scripts/requirements-2.11.env b/scripts/requirements-2.11.env new file mode 100644 index 000000000..e7defb9fc --- /dev/null +++ b/scripts/requirements-2.11.env @@ -0,0 +1,35 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# that the sourcing scripts know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirements3] +# ============================================================================== + +requirements=( +"ansible-core:2.11.12" +"pylint" +"six" +"voluptuous" +"yamllint" +"rstcheck" +) + +python=( +"python:3.8" +) \ No newline at end of file diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env new file mode 100644 index 000000000..5052447da --- /dev/null +++ b/scripts/requirements-2.12.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.12.10" +"pylint" +"rstcheck" +) + +python=( +"python:3.8" +) \ No newline at end of file diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env new file mode 100644 index 000000000..c08a7c7e9 --- /dev/null +++ b/scripts/requirements-2.13.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.13.7" +"pylint" +"rstcheck" +) + +python=( +"python:3.8" +) \ No newline at end of file diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env new file mode 100644 index 000000000..9d15b3dab --- /dev/null +++ b/scripts/requirements-2.14.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.14.1" +"pylint" +"rstcheck" +) + +python=( +"python:3.9" +) \ No newline at end of file diff --git a/scripts/requirements-2.9.env b/scripts/requirements-2.9.env new file mode 100644 index 000000000..2d7d9e11b --- /dev/null +++ b/scripts/requirements-2.9.env @@ -0,0 +1,35 @@ + +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# brew install python@3.8 +# ============================================================================== + +requirements=( +"ansible:2.9.27" +"pylint:2.3.1" +"rstcheck:3.3.1" +) + +python=( +"python:3.8" +) + diff --git a/scripts/requirements-common.env b/scripts/requirements-common.env new file mode 100644 index 000000000..365b8aa4f --- /dev/null +++ b/scripts/requirements-common.env @@ -0,0 +1,133 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-common.sh and not change. This supplies the +# venv's with additional packages for use by the developement work flow. 
+# ============================================================================== + +# Notes, "pylint", "rstcheck", "six", "voluptuous", "yamllint" is common but +# various requirements.txt have it frozen so it becomes a double requement +# error if present here as well. +requirements=( +"alabaster" +"ansible-builder" +"ansible-lint" +"antsibull-changelog" +"astroid" +"attrs" +"Babel" +"bandit" +"bcrypt" +"bindep" +"black" +"bleach" +"bleach-allowlist" +"bracex" +"certifi" +"cffi" +"charset-normalizer" +"click" +"cryptography" +"dill" +"distlib" +"distro" +"docutils" +"filelock" +"flake8" +"GitPython" +"galaxy-importer" +"gitdb" +"idna" +"imagesize" +"importlib-metadata" +"isort" +"Jinja2" +"jsonschema" +"lazy-object-proxy" +"Markdown" +"MarkupSafe" +"markdown-it-py" +"mccabe" +"mdurl" +"mock" +"more-itertools" +"mypy-extensions" +"oyaml" +"Parsley" +"PyNaCl" +"PyYAML" +"Pygments" +"packaging" +"paramiko" +"pathspec" +"pbr" +"platformdirs" +"pluggy" +"py" +"pycodestyle" +"pycparser" +"pyflakes" +"pyparsing" +"pyrsistent" +"pytest" +"pytest-ansible" +"pytest-mock" +"pytz" +"requests" +"requirements-parser" +"resolvelib" +"rich" +"ruamel.yaml" +"ruamel.yaml.clib" +"Sphinx" +"semantic-version" +"shellescape" +"smmap" +"snowballstemmer" +"sphinx-rtd-theme" +"sphinxcontrib-devhelp" +"sphinxcontrib-htmlhelp" +"sphinxcontrib-jsmath" +"sphinxcontrib-qthelp" +"sphinxcontrib-serializinghtml" +"sphinxcontrib.applehelp" +"stevedore" +"subprocess-tee" +"tomli" +"tomlkit" +"types-setuptools" +"typing_extensions" +"urllib3" +"virtualenv" +"wcmatch" +"wcwidth" +"webencodings" +"wrapt" +"zipp" +) + +# This original list caused some issues with pytest seeing our conftest plugin +# as already registered, the only time senstive solution I could come up with +# was to pip freeze a working venv and use that as the common base for now, over +# time, using pip show <package> on each of these packages to figure out why +# this occurs or maybe using pipdeptree will visually help. 
+# requirements=( +# "bandit" +# "pipdeptree" +# "pytest" +# "oyaml" +# "mock" +# "pytest-ansible" +# ) \ No newline at end of file diff --git a/scripts/requirements-latest.env b/scripts/requirements-latest.env new file mode 100644 index 000000000..505ef1261 --- /dev/null +++ b/scripts/requirements-latest.env @@ -0,0 +1,31 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. 
+# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:latest" +) + + +python=( +"python:3.9" +) \ No newline at end of file diff --git a/scripts/venv.sh b/scripts/venv.sh new file mode 100755 index 000000000..5ec946c49 --- /dev/null +++ b/scripts/venv.sh @@ -0,0 +1,585 @@ + +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# TODO: Need to add more global vars as some are scoped to fucntions and hidden +# from view. +# ------------------------------------------------------------------------------ +# Source +# ------------------------------------------------------------------------------ +cd $(dirname $0) +VERSION_PYTHON_MIN=3.9 +VERSION_PYTHON="0" +VERSION_PYTHON_PATH="" +DIVIDER="====================================================================" +VENV_HOME_MANAGED=${PWD%/*}/venv +# Array where each entry is: "<index>:<version>:<mount>:<data_set>" +HOSTS_ALL="" + +# hosts_env="hosts.env" + +# if [ -f "$hosts_env" ]; then +# . ./$hosts_env +# else +# echo "Unable to source file: $hosts_env, exiting." +# exit 1 +# fi + +mount_sh="mounts.sh" + +if [ -f "$mount_sh" ]; then + . ./$mount_sh +else + echo "Unable to source file: $mount_sh, exiting." 
+ exit 1 +fi + +################################################################################ +# Converts the requirements array into a exported single line delimited with +# '\\\n' so that it can be echo'd into a requirements.txt. For example in the make +# file `echo "${REQ}">$(VENV)/requirements.txt` returns a string: +# "ansible-core==2.11.12;\\\nastroid==2.12.11;\nattrs==22.1.0;.." +# If you want echo this to a file you will need to do something like: +# X=$(./make.env --req) +# echo -e $X>requirements.txt +# Or a one-iner: echo -e $(./make.env --req)>requirements.txt +################################################################################ +export_requirements(){ + unset REQ + export REQ + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + REQ=${REQ}"$key==$value;\n" + done +} + +################################################################################ +# Converts the requirements array into a single line delimited with '\\\n' +# so that it can be echo'd into a file. For example in the make +# file `echo "${REQ}">$(VENV)/requirements.txt`. +################################################################################ +echo_requirements(){ + + unset requirements_common + unset requirements + requirements_common="requirements-common.env" + unset REQ_COMMON + + if [ -f "$requirements_common" ]; then + . ./$requirements_common + else + echo "Unable to source file: $requirements_common, exiting." + exit 1 + fi + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + if [ "$key" = "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + elif [ -z "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + else + REQ_COMMON=${REQ_COMMON}"$key==$value;\\n" + fi + done + + #for file in `ls requirements-*.sh`; do + for file in `ls requirements-[0-9].[0-9]*.env`; do + # Unset the vars from any prior sourced files + unset REQ + unset requirements + unset venv + # Soure the file + if [ -f "$file" ]; then + . 
./$file + else + echo "Unable to source file: $file." + fi + + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + if [ "$key" = "$value" ]; then + REQ=${REQ}"$key;\\n" + elif [ -z "$value" ]; then + REQ=${REQ}"$key;\\n" + else + REQ=${REQ}"$key==$value;\\n" + fi + done + echo "${REQ}""${REQ_COMMON}" + + py_req="0" + for ver in "${python[@]}" ; do + key=${ver%%:*} + value=${ver#*:} + py_req="${value}" + done + echo "${py_req}" + done +} + + +# Lest normalize the version from 3.10.2 to 3010002000 +# Do we we need that 4th octet? +normalize_version() { + echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; +} + +make_venv_dirs(){ + # VENV's control are under this script which is to create them the GitHub + # project root (../venv/), this is because we want this to be managed such + # that direcotry `../venv` is defined in .gitignore and galaxy.yml + # (build_ignore) to avoid having them pulled in by any build process. + + # We should think about the idea of allowing: + # --force, --synch, --update thus not sure we need this method and better to + # manage this logic inline to write_req + for file in `ls requirements-[0-9].[0-9]*.env`; do + # eg extract 2.14 from requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + #echo $venv_name + mkdir -p "${VENV_HOME_MANAGED}"/"${venv_name}" + done +} + +write_requirements(){ + option_pass=$1 + unset requirements_common + unset requirements + unset REQ + unset REQ_COMMON + requirements_common_file="requirements-common.env" + + # Source the requirements file for now, easy way to do this. Exit may not + # not be needed but leave it for now. + if [ -f "$requirements_common_file" ]; then + . ./$requirements_common_file + else + echo "Unable to source file: $requirements_common_file, exiting." 
+ exit 1 + fi + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + if [ "$key" = "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + elif [ -z "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + else + REQ_COMMON=${REQ_COMMON}"$key==$value;\\n" + fi + done + + #for file in `ls requirements-*.sh`; do + for file in `ls requirements-[0-9].[0-9]*.env`; do + # Unset the vars from any prior sourced files + unset REQ + unset requirements + unset venv + # Soure the file + if [ -f "$file" ]; then + . ./$file + else + echo "Unable to source file: $file." + fi + + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + #REQ=${REQ}"$key==$value;\\n" + if [ "$key" = "$value" ]; then + REQ=${REQ}"$key;\\n" + elif [ -z "$value" ]; then + REQ=${REQ}"$key;\\n" + else + REQ=${REQ}"$key==$value;\\n" + fi + done + + py_req="0" + for ver in "${python[@]}" ; do + key=${ver%%:*} + value=${ver#*:} + py_req="${value}" + done + + # Is the discoverd python >= what the requirements.txt requires? 
+ if [ $(normalize_version $VERSION_PYTHON) -ge $(normalize_version $py_req) ]; then + echo "${REQ}${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt + cp mounts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ + #cp info.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ + #cp info.env.axx "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp mounts.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp hosts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp venv.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp profile.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + + # Decrypt file + if [ "$option_pass" ]; then + touch "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env + # Probably can be a 600 - needs testing + chmod 700 "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env + #echo "${option_pass}" | openssl bf -d -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin + echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin + fi + else + echo "Not able to create managed venv path: ${VENV_HOME_MANAGED}/${venv_name} , min python required is ${py_req}, found version $VERSION_PYTHON" + echo "Consider installing another Python for your system, if on Mac 'brew install python@3.10', otherwise review your package manager" + rm -rf "${VENV_HOME_MANAGED}"/"${venv_name}"/ + fi + done +} + + +create_venv_and_pip_install_req(){ + + for file in `ls requirements-[0-9].[0-9]*.env`; do + unset venv + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + + if [ -f $VENV_HOME_MANAGED/$venv_name/requirements.txt ]; then + echo ${DIVIDER} + echo "Creating python virtual environment: ${VENV_HOME_MANAGED}/${venv_name}." 
+ echo ${DIVIDER} + ${VERSION_PYTHON_PATH} -m venv "${VENV_HOME_MANAGED}"/"${venv_name}"/ + ${VENV_HOME_MANAGED}/${venv_name}/bin/pip3 install --upgrade pip + ${VENV_HOME_MANAGED}/${venv_name}/bin/pip install --upgrade pip + "${VENV_HOME_MANAGED}"/"${venv_name}"/bin/pip3 install -r "${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt + else + echo "Virtual environment "${VENV_HOME_MANAGED}"/"${venv_name}" already exists, no changes made."; \ + fi + done +} + + +find_in_path() { + result="" + IFS=: + for x in $PATH; do + if [ -x "$x/$1" ]; then + result=${result}" $x/$1" + fi + done + echo $result +} + + + +# Find the most recent python in a users path +discover_python(){ + # Don't use which, it only will find first in path within script + # for python_found in `which python3 | cut -d" " -f3`; do + pys=("python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") + #pys=("python3.8" "python3.9") + for py in "${pys[@]}"; do + for python_found in `find_in_path $py`; do + ver=`${python_found} --version | cut -d" " -f2` + ver_path="$python_found" + echo "Found $ver_path" + done + + + if [ $(normalize_version $ver) -ge $(normalize_version $VERSION_PYTHON) ]; then + VERSION_PYTHON="$ver" + VERSION_PYTHON_PATH="$ver_path" + fi + + done + + echo ${DIVIDER} + echo "Discovered Python version: ${VERSION_PYTHON}." + echo "Discovered Python path: ${VERSION_PYTHON_PATH}." + echo ${DIVIDER} +} +################################################################################ +# Return Python HOME path when given a key that is contained in the zoau array. 
+################################################################################ + +get_pyz(){ + set_python_mount_paths + arg=$1 + unset PYZ + echo ${PYTHON_MOUNT_PATHS[@]} + for py in "${PYTHON_MOUNT_PATHS[@]}" ; do + key=${py%%:*} + value=${py#*:} + if [ "$key" = "$arg" ]; then + PYZ="$value" + fi + done +} + +################################################################################ +# Echo Python HOME path when given a key that is contained in the zoau array. +################################################################################ +echo_pyz(){ + get_pyz $1 + echo "${PYZ}" +} + +################################################################################ +# Return ZOAU HOME path when given a key that is contained in the zoau array. +################################################################################ +get_zoau(){ + arg=$1 + unset ZOAU + for zo in "${zoau[@]}" ; do + key=${zo%%:*} + value=${zo#*:} + if [ "$key" = "$arg" ]; then + ZOAU="$value" + fi + done +} + +################################################################################ +# Echo ZOAU HOME path when given a key that is contained in the zoau array. +################################################################################ +echo_zoau(){ + get_zoau $1 + echo "${ZOAU}" +} + +latest_venv(){ + dir_version_latest="0" + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` + + if [ ! -z "$test_for_managed_venv" ]; then + for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | cut -d"-" -f2`; do + if [ $(normalize_version $dir_version) -ge $(normalize_version $dir_version_latest) ]; then + dir_version_latest=$dir_version + fi + done + echo "${VENV_HOME_MANAGED}"/"venv-"$dir_version_latest + fi +} + + + +# ============================================================================== +# Public function that initializes a global array `ZOAU_MOUNTS` where each index +# contains clolon `:` delimited values about ZOAU mounts. 
For example +# ZOAU_MOUNTS[0] has in it <index>:<version>:<mount>:<data_set> where that may +# look like "1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS", see sourced script +# `mounts.env` for more information. +# GLOBAL: ZOAU_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_zoau_mounts +# ============================================================================== +set_hosts_to_array(){ + + # Source the envrionment file here rather than at the top of this script. + # If you source it to early it will trigger the condtion below that was + # removed from info.env. + if [ -f "info.env" ]; then + . ./info.env + else # check if the env varas instead have been exported + if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then + echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported." + echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env." + exit 1 + fi + fi + + hosts_env="hosts.env" + + if [ -f "$hosts_env" ]; then + . ./$hosts_env + else + echo "Unable to source file: $hosts_env, exiting." + exit 1 + fi + + _set_shell_array HOSTS_ALL "$(echo $host_list_str)" +} + + +################################################################################ +# Host list details used by the function `get_config` to generate +# a collections configuration. Keys can be an ECs hostname or a users laptop +# user name which is the same as what `whoami` returns. +# Using word spliting to split the values into an array, for example +# temp_array=(${tgt//:/ }) translates to ${string//substring/replacement}, thus +# all ':' are matched and replaced with a ' ' and then you have +# (element1 element2 ... elementN) to initialize the array. 
+################################################################################ + +get_host_ids(){ + set_hosts_to_array + unset host_index + unset host_prefix + for tgt in "${HOSTS_ALL[@]}" ; do + host_index=`echo "${tgt}" | cut -d ":" -f 1` + host_prefix=`echo "${tgt}" | cut -d ":" -f 2` + + echo "ID: $host_index Host: $host_prefix" + done +} + +# Should renane this with a prefix of set_ to make it more readable +ssh_host_credentials(){ + arg=$1 + unset host + unset user + unset pass + + # Call helper script to have ZOAU_MOUNTS generated + set_hosts_to_array + for tgt in "${HOSTS_ALL[@]}" ; do + key=`echo "${tgt}" | cut -d ":" -f 1` + if [ "$key" = "$arg" ]; then + host=`echo "${tgt}" | cut -d ":" -f 2` + user=`echo "${tgt}" | cut -d ":" -f 3` + pass=`echo "${tgt}" | cut -d ":" -f 4` + fi + done +} + +################################################################################ +# Copy a users key to a remote target to be a known host, if the host has a cert +# field in the host_list not equal to none, it will also be copied for jenkins +################################################################################ +ssh_copy_key(){ + sshpass -p "${pass}" ssh-copy-id -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa.pub "${user}"@"${host}" &> /dev/null + + if [ ! -z "$SSH_KEY_PIPELINE" ]; then + echo "${SSH_KEY_PIPELINE}" | ssh "${user}"@"${host}" "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized_keys" + else + echo "This is optional, if you define and export 'SSH_KEY_PIPELINE', the z/OS host can be authenticated with additonal keys such as a pipeline." + fi +} + +################################################################################ +# Scp some scripts to the remote host and execute them. 
+################################################################################ +ssh_copy_files_and_mount(){ + scp -O "$1" "$2" "$3" "${user}"@"${host}":/u/"${user}" + ssh "${user}"@"${host}" "cd /u/"${user}"; chmod 755 *.sh; ./mounts.sh --mount; exit;" +} + +################################################################################ +# Echo the configuration used by the ansible core python test framework +################################################################################ +echo_config(){ +unset CONFIG + +CONFIG=${CONFIG}"host: ${host}\\\n" +CONFIG=${CONFIG}"user: ${user}\\\n" +CONFIG=${CONFIG}"python_path: ${PYZ_HOME}/bin/python3\\\n" +CONFIG=${CONFIG}"\\\n" +CONFIG=${CONFIG}"environment:\\\n" +CONFIG=${CONFIG}" _BPXK_AUTOCVT: \"ON\"\\\n" +CONFIG=${CONFIG}" _CEE_RUNOPTS: \"'FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)'\"\\\n" +CONFIG=${CONFIG}" _TAG_REDIR_ERR: txt\\\n" +CONFIG=${CONFIG}" _TAG_REDIR_IN: txt\\\n" +CONFIG=${CONFIG}" _TAG_REDIR_OUT: txt\\\n" +CONFIG=${CONFIG}" LANG: C\\\n" +CONFIG=${CONFIG}" ZOAU_HOME: ${ZOAU_HOME}\\\n" +CONFIG=${CONFIG}" LIBPATH: ${ZOAU_HOME}/lib:${PYZ_HOME}/lib:/lib:/usr/lib:.\\\n" +CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib\\\n" +CONFIG=${CONFIG}" PATH: ${ZOAU_HOME}/bin:${PYZ_HOME}/bin:/bin:/usr/sbin:/var/bin\\\n" +CONFIG=${CONFIG}" PYTHONSTDINENCODING: \"cp1047\"\\n" + +echo ${CONFIG} +} + +write_test_config(){ +unset CONFIG +host_zvm=$1 +pyz_version=$2 +zoau_version=$3 +managed_venv_path=$4 + +ssh_host_credentials "$host_zvm" +get_python_mount "$pyz_version" +get_zoau_mount "$zoau_version" + +CONFIG=${CONFIG}"host: ${host}\\n" +CONFIG=${CONFIG}"user: ${user}\\n" +CONFIG=${CONFIG}"python_path: ${PYZ_HOME}/bin/python3\\n" +CONFIG=${CONFIG}"\\n" +CONFIG=${CONFIG}"environment:\\n" +CONFIG=${CONFIG}" _BPXK_AUTOCVT: \"ON\"\\n" +CONFIG=${CONFIG}" _CEE_RUNOPTS: \"'FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)'\"\\n" +CONFIG=${CONFIG}" _TAG_REDIR_IN: txt\\n" +CONFIG=${CONFIG}" _TAG_REDIR_OUT: txt\\n" +CONFIG=${CONFIG}" LANG: C\\n" 
+CONFIG=${CONFIG}" ZOAU_HOME: ${ZOAU_HOME}\\n" +CONFIG=${CONFIG}" LIBPATH: ${ZOAU_HOME}/lib:${PYZ_HOME}/lib:/lib:/usr/lib:.\\n" +CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib\\n" +CONFIG=${CONFIG}" PATH: ${ZOAU_HOME}/bin:${PYZ_HOME}/bin:/bin:/usr/sbin:/var/bin\\n" +CONFIG=${CONFIG}" PYTHONSTDINENCODING: \"cp1047\"\\n" + +echo $CONFIG>$managed_venv_path/config.yml +} + +################################################################################ +# Main arg parser +################################################################################ + +case "$1" in +--cert) + ssh_host_credentials $2 + ssh_copy_key + ;; +--host-setup-files) #ec33017a "mounts.env" "mounts.sh" "shell-helper.sh" "profile.sh" + ssh_host_credentials $2 + ssh_copy_files_and_mount $3 $4 $5 + ;; +--targets) + get_host_ids + ;; +--config) + write_test_config $2 $3 $4 $5 + ;; +--disc) + discover_python + ;; +--vsetup) + discover_python + make_venv_dirs + #echo_requirements + write_requirements $3 + create_venv_and_pip_install_req + ;; +--latest_venv) + latest_venv + ;; +--perform-unit-test) + discover_python + #make_venv_dirs + echo_requirements + #write_requirements $3 + ;; +*) + echo "ERROR: unknown parameter $1" + ;; +esac From 42a805aea1d70c40c4b395f7a1a56dd3e6c2d379 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 26 May 2023 19:11:58 -0400 Subject: [PATCH 109/413] 347 new query fields (#778) * changing job.py to return 7 more fields, and for zos_job_query to pass them through * corrected testing to pull all new values through this assumes zoau 1.2.3 and z/OS at least 2.4 need to test older zoau to make sure this will still work * Added zoau version testing import to job.py so it won't reach for non-existent members. * pep8 and lint required changes * changed test to see if it will pass unit testing * Modified test_zos_data_set_func to skip HFS test if zOS > 02.04 * changed OS test for hfs usage * corrected usage of 'hosts'... 
removed the definition in prior edit. * changing OS version checker * corrected string extraction for OS version checker * added delete shell to 196/197 (finally of cat/uncat test) removed success message from 830 (version test logic) * removed the mvscmdauth call, as it coincides with some new test failures. * added changed=false back into testing of job_query * correction of zos->zoau name in comments. --- plugins/module_utils/job.py | 22 +++++- plugins/modules/zos_job_query.py | 48 +++++++++++ plugins/modules/zos_job_submit.py | 36 +++++++++ .../modules/test_zos_data_set_func.py | 79 +++++++++++-------- 4 files changed, 153 insertions(+), 32 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 94909aba4..d987d5a52 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -31,6 +31,11 @@ list_dds = MissingZOAUImport() listing = MissingZOAUImport() +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" + def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -200,6 +205,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] # e.g.: OMVSADM HELLO JOB00126 JCLERR ? 
# listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not + # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers + # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] + # creationdate=job[9] creationtime=job[10] queueposition=job[11] final_entries = [] entries = listing(job_id=job_id_temp) @@ -232,13 +240,25 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"] = {} job["ret_code"]["msg"] = entry.status + " " + entry.rc job["ret_code"]["msg_code"] = entry.rc - # Why was this set to an empty string? + job["ret_code"]["code"] = None if len(entry.rc) > 0: if entry.rc.isdigit(): job["ret_code"]["code"] = int(entry.rc) job["ret_code"]["msg_text"] = entry.status + # this section only works on zoau 1.2.3 vvv + + if ZOAU_API_VERSION > "1.2.2": + job["job_class"] = entry.job_class + job["svc_class"] = entry.svc_class + job["priority"] = entry.priority + job["asid"] = entry.asid + job["creation_datetime"] = entry.creation_datetime + job["queue_position"] = entry.queue_position + + # this section only works on zoau 1.2.3 ^^^ + job["class"] = "" job["content_type"] = "" job["ret_code"]["steps"] = [] diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 28d38b727..cb9a28a53 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -174,6 +174,36 @@ } ] } + job_class: + description: + Letter indicating job class for this job. + type: str + sample: A + svc_class: + description: + Character indicating service class for this job. + type: str + sample: C + priority: + description: + A numeric indicator of the job priority assigned through JES. + type: int + sample: 4 + asid: + description: + An identifier created by JES. + type: int + sample: 0 + creation_datetime: + description: + Date and time, local to the target system, when the job was created. 
+ type: str + sample: 20230504T141500 + queue_position: + description: + Integer of the position within the job queue where this jobs resided. + type: int + sample: 3 sample: [ { @@ -181,12 +211,24 @@ "owner": "ADMIN", "job_id": "JOB01427", "ret_code": "null", + "job_class": "K", + "svc_class": "?", + "priority": 1, + "asid": 0, + "creation_datetime": "20230503T121300", + "queue_position": 3, }, { "job_name": "LINKCBL", "owner": "ADMIN", "job_id": "JOB16577", "ret_code": { "msg": "CANCELED", "code": "null" }, + "job_class": "A", + "svc_class": "E", + "priority": 0, + "asid": 4, + "creation_datetime": "20230503T121400", + "queue_position": 0, }, ] message: @@ -354,6 +396,12 @@ def parsing_jobs(jobs_raw): "system": job.get("system"), "subsystem": job.get("subsystem"), "ret_code": ret_code, + "job_class": job.get("job_class"), + "svc_class": job.get("svc_class"), + "priority": job.get("priority"), + "asid": job.get("asid"), + "creation_datetime": job.get("creation_datetime"), + "queue_position": job.get("queue_position"), } jobs.append(job_dict) return jobs diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index a58e138a1..97cbbc4a7 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -276,6 +276,36 @@ }, ] } + job_class: + description: + Letter indicating job class for this job. + type: str + sample: A + svc_class: + description: + Character indicating service class for this job. + type: str + sample: C + priority: + description: + A numeric indicator of the job priority assigned through JES. + type: int + sample: 4 + asid: + description: + An identifier created by JES. + type: int + sample: 0 + creation_datetime: + description: + Date and time, local to the target system, when the job was created. + type: str + sample: 20230504T141500 + queue_position: + description: + Integer of the position within the job queue where this jobs resided. 
+ type: int + sample: 3 sample: [ { @@ -489,6 +519,12 @@ } ] }, + "job_class": "K", + "svc_class": "?", + "priority": 1, + "asid": 0, + "creation_datetime": "20230503T121300", + "queue_position": 3, "subsystem": "STL1" } ] diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 118fdcc18..c4833aa56 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -152,6 +152,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -809,42 +810,58 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): ["HFS", "ZFS"], ) def test_filesystem_create_and_mount(ansible_zos_module, filesystem): + fulltest = True + hosts = ansible_zos_module + try: - hosts = ansible_zos_module hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, type=filesystem) - temp_dir_name = make_tempfile(hosts, directory=True) - results2 = hosts.all.command( - cmd="mount -t {0} -f {1} {2}".format( - filesystem, DEFAULT_DATA_SET_NAME, temp_dir_name + + if filesystem == "HFS": + result0 = hosts.all.shell(cmd="zinfo -t sys") + for result in result0.contacted.values(): + sys_info = result.get("stdout_lines") + product_version = sys_info[4].split()[1].strip("'") + product_release = sys_info[5].split()[1].strip("'") + if product_release >= "05" or product_version > "02": + fulltest = False + print( "skipping HFS test: zOS > 02.04" ) + + if fulltest: + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results = 
hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, type=filesystem) + temp_dir_name = make_tempfile(hosts, directory=True) + results2 = hosts.all.command( + cmd="mount -t {0} -f {1} {2}".format( + filesystem, DEFAULT_DATA_SET_NAME, temp_dir_name + ) ) - ) - results3 = hosts.all.shell(cmd="cd {0} ; df .".format(temp_dir_name)) + results3 = hosts.all.shell(cmd="cd {0} ; df .".format(temp_dir_name)) - # clean up - results4 = hosts.all.command(cmd="unmount {0}".format(temp_dir_name)) - results5 = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results6 = hosts.all.file(path=temp_dir_name, state="absent") + # clean up + results4 = hosts.all.command(cmd="unmount {0}".format(temp_dir_name)) + results5 = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results6 = hosts.all.file(path=temp_dir_name, state="absent") - for result in results.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None - for result in results2.contacted.values(): - assert result.get("changed") is True - assert result.get("stderr") == "" - for result in results3.contacted.values(): - assert result.get("changed") is True - assert result.get("stderr") == "" - assert DEFAULT_DATA_SET_NAME.upper() in result.get("stdout", "") - for result in results4.contacted.values(): - assert result.get("changed") is True - assert result.get("stderr") == "" - for result in results5.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None - for result in results6.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + for result in results2.contacted.values(): + assert result.get("changed") is True + assert result.get("stderr") == "" + for result in results3.contacted.values(): + assert result.get("changed") is 
True + assert result.get("stderr") == "" + assert DEFAULT_DATA_SET_NAME.upper() in result.get("stdout", "") + for result in results4.contacted.values(): + assert result.get("changed") is True + assert result.get("stderr") == "" + for result in results5.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + for result in results6.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None finally: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From eb1ef33ba4cdc7513afedb5ef0f5d931e481554b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 29 May 2023 10:21:14 -0600 Subject: [PATCH 110/413] Missing fragment in PR 778 New query fields (#780) * added fragment for pr 778 * Added changelog fragment query new fields Added changelog fragment query new fields * Update 778-query-new-fields.yml --- changelogs/fragments/778-query-new-fields.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 changelogs/fragments/778-query-new-fields.yml diff --git a/changelogs/fragments/778-query-new-fields.yml b/changelogs/fragments/778-query-new-fields.yml new file mode 100644 index 000000000..9f2c71579 --- /dev/null +++ b/changelogs/fragments/778-query-new-fields.yml @@ -0,0 +1,5 @@ +minor_changes: +- zos_job_query - Adds new fields job_class, svc_class, priority, asid, + creation_datetime, and queue_position to the return output when querying + or submitting a job. Available when using ZOAU v1.2.3 or greater. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/778) From 40dab5ed63fd64ee0bc062a168d403cf997b4f6c Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 31 May 2023 12:05:26 -0700 Subject: [PATCH 111/413] Update docs with ansible/ansible-core version, AAP and fix the dated git issue templates (#771) * Doc vesion updates Signed-off-by: ddimatos <dimatos@gmail.com> * Repository template updates and future proofing Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Formatting corrections for release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Upate issue templates with newer version of software Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 23 ++++++++--- .../ISSUE_TEMPLATE/collaboration_issue.yml | 23 ++++++++--- .github/ISSUE_TEMPLATE/doc_issue.yml | 40 ++++++++----------- .github/ISSUE_TEMPLATE/enabler_issue.yml | 2 +- .../enhancement_feature.issue.yml | 2 +- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- README.md | 11 ++++- changelogs/771-update-ansible-version.yaml | 7 ++++ docs/source/release_notes.rst | 11 +++-- 9 files changed, 80 insertions(+), 41 deletions(-) create mode 100644 changelogs/771-update-ansible-version.yaml diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 359add494..8a1cd3ccd 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -28,6 +28,9 @@ body: description: Which version of ZOAU are you using? multiple: false options: + - v1.2.5 + - v1.2.4 + - v1.2.3 - v1.2.2 - v1.2.1 - v1.2.0 @@ -42,6 +45,9 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: false options: + - v3.14.x + - v3.13.x + - v3.12.x - v3.11.x - v3.10.x - v3.9.x @@ -55,13 +61,17 @@ body: description: Which version of z/OS Ansible core collection are you using. 
If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 + - v1.8.0-beta.1 + - v1.7.0 + - v1.7.0-beta.1 + - v1.6.0 + - v1.6.0-beta.1 - v1.5.0 - - v1.4.0 + - v1.4.1 - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - v1.2.1 - v1.1.0 - v1.0.0 @@ -75,6 +85,8 @@ body: multiple: false options: - latest + - v2.16.x + - v2.15.x - v2.14.x - v2.13.x - v2.12.x @@ -89,6 +101,7 @@ body: description: What is the version of z/OS on the managed node? multiple: false options: + - v3.1 - v2.5 - v2.4 - v2.3 diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index bf6db4778..c9ac9f151 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -40,6 +40,9 @@ body: description: Which version of ZOAU are you using? multiple: false options: + - v1.2.5 + - v1.2.4 + - v1.2.3 - v1.2.2 - v1.2.1 - v1.2.0 @@ -54,6 +57,9 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: true options: + - v3.14.x + - v3.13.x + - v3.12.x - v3.11.x - v3.10.x - v3.9.x @@ -67,13 +73,17 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 + - v1.8.0-beta.1 + - v1.7.0 + - v1.7.0-beta.1 + - v1.6.0 + - v1.6.0-beta.1 - v1.5.0 - - v1.4.0 + - v1.4.1 - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - v1.2.1 - v1.1.0 - v1.0.0 @@ -87,6 +97,8 @@ body: multiple: false options: - latest + - v2.16.x + - v2.15.x - v2.14.x - v2.13.x - v2.12.x @@ -101,6 +113,7 @@ body: description: What is the version of z/OS on the managed node? 
multiple: false options: + - v3.1 - v2.5 - v2.4 - v2.3 diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 07ddbc40e..5583ce5c1 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -1,5 +1,5 @@ name: Report a documentation issue -description: Request that documentation be reviewed. Complete all required fields. +description: Request that documentation be reviewed. Complete all required fields. title: "[Documentation] <title> " labels: [Documentation] assignees: @@ -27,35 +27,29 @@ body: 5. Include browser or shell if applicable validations: required: true - - type: textarea - id: ansible-version - attributes: - label: Ansible version - description: What is the version of Ansible on the controller if applicable. - placeholder: Paste verbatim output from `ansible --version`. - render: SHELL - validations: - required: false - type: dropdown id: collection-version attributes: label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true + description: Which version of z/OS Ansible core collection are you reporting a documentation bug. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). 
+ multiple: false options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 + - v1.8.0-beta.1 + - v1.7.0 + - v1.7.0-beta.1 + - v1.6.0 + - v1.6.0-beta.1 + - v1.5.0 + - v1.4.1 - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 + - v1.2.1 + - v1.1.0 + - v1.0.0 validations: - required: true + required: false - type: dropdown id: modules attributes: diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index 37131e500..abc9f16c2 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -1,7 +1,7 @@ name: Enabler task description: | Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. - Complete all required fields. + Complete all required fields. title: "[Enabler] <title> " labels: [Enabler] assignees: diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index d39840872..f5bc9325f 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -1,5 +1,5 @@ name: Request an enhancement or new feature -description: Request a new feature or an enhancement. Complete all required fields. +description: Request a new feature or an enhancement. Complete all required fields. title: "[Enhancement] <title> " labels: [Enhancement] assignees: diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index beea537e9..a7e7dcfa1 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -1,5 +1,5 @@ name: Request a new module -description: Request a new module be added to the collection. Complete all required fields. +description: Request a new module be added to the collection. Complete all required fields. 
title: "[Module] <title> " labels: [Module] assignees: diff --git a/README.md b/README.md index d6505759b..756f06d92 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,16 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against the following Ansible versions: >=2.9,<2.15. +This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.15. +The Ansible and Ansible Core versions supported for this collection align to the +[ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the +[Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** +and **ansible-core**. + +For **Ansible Automation Platform** (AAP) users, review the +[Ansible Automation Platform Certified Content](https://access.redhat.com/articles/3642632) +and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) +for more more information on supported versions of Ansible. Copyright ========= diff --git a/changelogs/771-update-ansible-version.yaml b/changelogs/771-update-ansible-version.yaml new file mode 100644 index 000000000..92354841b --- /dev/null +++ b/changelogs/771-update-ansible-version.yaml @@ -0,0 +1,7 @@ +trivial: +- doc - Updated the documentation in the README and release_notes.rst to reflect + ansible, ansible-core, Automation Hub and z/OS version. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) +- templates - Update the git issue templates with current and + future product versions. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index d897feef4..1e211ec89 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -27,12 +27,14 @@ Bugfixes -------- - ``zos_copy`` + - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. - Fixes a bug where the module would change the mode for a directory when copying in the contents of another directory. - Fixes a bug where the incorrect encoding would be used during normalization, particularly when processing newlines in files. - ``zos_encode`` - Fixes a bug where converted files were not tagged with the new code set afterwards. - ``zos_find`` - Fixes a bug where the module would stop searching and exit after the first value in a list was not found. - ``zos_lineinfile`` + - Removes use of Python f-string to ensure support for Python 2.7 on the controller. - Fixes a bug where an incorect error message would be raised when a USS source was not found. @@ -46,7 +48,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS Version`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -163,7 +165,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS Version`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. 
@@ -172,8 +174,7 @@ Version 1.4.1 ============= Bug fixes - --------------------------- +--------- * ``zos_copy`` @@ -856,6 +857,8 @@ Reference https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS V2R3: https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html +.. _z/OS Version: + https://www.ibm.com/docs/en/zos .. _FAQs: https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html From ca6edd2d983004ee2bca32824da814b427864473 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 2 Jun 2023 20:32:05 -0700 Subject: [PATCH 112/413] Update ac command supporting files (#789) * Update ac command supporting files Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/789-ac-command-updates.yml | 3 + scripts/hosts.env | 65 ++++++++++++++++--- scripts/mounts.env | 3 +- scripts/requirements-2.15.env | 32 +++++++++ 4 files changed, 93 insertions(+), 10 deletions(-) create mode 100644 changelogs/fragments/789-ac-command-updates.yml create mode 100644 scripts/requirements-2.15.env diff --git a/changelogs/fragments/789-ac-command-updates.yml b/changelogs/fragments/789-ac-command-updates.yml new file mode 100644 index 000000000..c0c60dcf1 --- /dev/null +++ b/changelogs/fragments/789-ac-command-updates.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Adds new mounts, targets and ansible 2.15 requirements.env. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/789) \ No newline at end of file diff --git a/scripts/hosts.env b/scripts/hosts.env index 8351ba350..58075263d 100644 --- a/scripts/hosts.env +++ b/scripts/hosts.env @@ -22,21 +22,68 @@ # fi # fi -host_list_str="ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ +"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ +"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01105a:ec01105a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01129a:ec01129a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01130a:ec01130a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01131a:ec01131a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01133a:ec01133a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01136a:ec01136a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01137a:ec01137a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01138a:ec01138a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01139a:ec01139a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01145a:ec01145a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01146a:ec01146a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01147a:ec01147a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01148a:ec01148a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01149a:ec01149a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01152a:ec01152a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01153a:ec01153a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01154a:ec01154a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03071a:ec03071a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03102a:ec03102a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03127a:ec03127a${HOST_SUFFIX}:${USER}:${PASS} "\ 
+"ec03129a:ec03129a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03173a:ec03173a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03175a:ec03175a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec32016a:ec32016a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec32024a:ec32024a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec32051a:ec32051a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33002a:ec33002a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33003a:ec33003a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33004a:ec33004a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33005a:ec33005a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33007a:ec33007a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33008a:ec33008a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33009a:ec33009a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33010a:ec33010a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33011a:ec33011a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33014a:ec33014a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33015a:ec33015a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33016a:ec33016a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33019a:ec33019a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33020a:ec33020a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33021a:ec33021a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33022a:ec33022a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33023a:ec33023a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ 
-"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ -"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ -"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} " +"ec33027a:ec33027a${HOST_SUFFIX}:${USER}:${PASS} " diff --git a/scripts/mounts.env b/scripts/mounts.env index 8f944d971..876876cd3 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -38,7 +38,8 @@ zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ "10:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ "11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ "12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ -"13:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +"13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ +"14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env new file mode 100644 index 000000000..5f8b36260 --- /dev/null +++ b/scripts/requirements-2.15.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.15.0" +"pylint" +"rstcheck" +) + +python=( +"python:3.9" +) From 558ef8b41c83d8e0f69a6bdb1b04fd4529e69729 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 2 Jun 2023 21:57:03 -0700 Subject: [PATCH 113/413] Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_data_set.rst | 12 ++++++++--- plugins/modules/zos_data_set.py | 30 +++++++++++++++++++--------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 046b8a2f5..c310069e8 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -65,6 +65,9 @@ state If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. 
+ + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. @@ -74,7 +77,7 @@ state If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If *state=uncataloged* and the data set is not found, no action taken , module completes successfully with *changed=False*. + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. @@ -330,6 +333,9 @@ batch If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. @@ -339,7 +345,7 @@ batch If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If *state=uncataloged* and the data set is not found, no action taken , module completes successfully with *changed=False*. 
+ If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. @@ -352,7 +358,7 @@ batch type - The data set type to be used when creating a data set. (e.g ``pdse``) + The data set type to be used when creating a data set. (e.g ``PDSE``) ``MEMBER`` expects to be used with an existing partitioned data set. diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 3e7ee1700..dde8f3488 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -67,6 +67,12 @@ - > If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). + - > + If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + create a member formatted to store data, module completes successfully with I(changed=True). + Note, a PDSE does not allow a mixture of formats such that there is + executables (program objects) and data. The member created is formatted to store data, + not an executable. - > If I(state=cataloged) and I(volumes) is provided and the data set is already cataloged, no action taken, module completes successfully with I(changed=False). @@ -79,11 +85,11 @@ module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, returns failure with I(changed=False). - > - If I(state=uncataloged) and the data set is not found, - no action taken , module completes successfully with I(changed=False). + If I(state=uncataloged) and the data set is not found, no action taken, + module completes successfully with I(changed=False). - > - If I(state=uncataloged) and the data set is found, - the data set is uncataloged, module completes successfully with I(changed=True). 
+ If I(state=uncataloged) and the data set is found, the data set is uncataloged, + module completes successfully with I(changed=True). required: false type: str default: present @@ -314,6 +320,12 @@ - > If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). + - > + If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + create a member formatted to store data, module completes successfully with I(changed=True). + Note, a PDSE does not allow a mixture of formats such that there is + executables (program objects) and data. The member created is formatted to store data, + not an executable. - > If I(state=cataloged) and I(volumes) is provided and the data set is already cataloged, no action taken, module completes successfully with I(changed=False). @@ -326,11 +338,11 @@ module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, returns failure with I(changed=False). - > - If I(state=uncataloged) and the data set is not found, - no action taken , module completes successfully with I(changed=False). + If I(state=uncataloged) and the data set is not found, no action taken, + module completes successfully with I(changed=False). - > - If I(state=uncataloged) and the data set is found, - the data set is uncataloged, module completes successfully with I(changed=True). + If I(state=uncataloged) and the data set is found, the data set is uncataloged, + module completes successfully with I(changed=True). required: false type: str default: present @@ -341,7 +353,7 @@ - uncataloged type: description: - - The data set type to be used when creating a data set. (e.g C(pdse)) + - The data set type to be used when creating a data set. (e.g C(PDSE)) - C(MEMBER) expects to be used with an existing partitioned data set. - Choices are case-insensitive. 
required: false From ac8559ae4f7d83b2f380332fd4b71ba219b26a19 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 2 Jun 2023 21:57:47 -0700 Subject: [PATCH 114/413] Add recently changed module doc from prior commits Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_query.rst | 51 +++++++++++++++++++++++++- docs/source/modules/zos_job_submit.rst | 43 +++++++++++++++++++++- 2 files changed, 91 insertions(+), 3 deletions(-) diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index d34098617..40bd7b353 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -133,19 +133,31 @@ jobs [ { + "asid": 0, + "creation_datetime": "20230503T121300", + "job_class": "K", "job_id": "JOB01427", "job_name": "LINKJOB", "owner": "ADMIN", - "ret_code": "null" + "priority": 1, + "queue_position": 3, + "ret_code": "null", + "svc_class": "?" }, { + "asid": 4, + "creation_datetime": "20230503T121400", + "job_class": "A", "job_id": "JOB16577", "job_name": "LINKCBL", "owner": "ADMIN", + "priority": 0, + "queue_position": 0, "ret_code": { "code": "null", "msg": "CANCELED" - } + }, + "svc_class": "E" } ] @@ -232,6 +244,41 @@ jobs + job_class + Letter indicating job class for this job. + + | **type**: str + | **sample**: A + + svc_class + Character indicating service class for this job. + + | **type**: str + | **sample**: C + + priority + A numeric indicator of the job priority assigned through JES. + + | **type**: int + | **sample**: 4 + + asid + An identifier created by JES. + + | **type**: int + + creation_datetime + Date and time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 20230504T141500 + + queue_position + Integer of the position within the job queue where this job resided. + + | **type**: int + | **sample**: 3 + message Message returned on failure. 
diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index bb438f8a5..4375564bb 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -223,8 +223,10 @@ jobs [ { + "asid": 0, "class": "K", "content_type": "JOB", + "creation_datetime": "20230503T121300", "ddnames": [ { "byte_count": "677", @@ -419,9 +421,12 @@ jobs "stepname": "DLORD6" } ], + "job_class": "K", "job_id": "JOB00361", "job_name": "DBDGEN00", "owner": "OMVSADM", + "priority": 1, + "queue_position": 3, "ret_code": { "code": 0, "msg": "CC 0000", @@ -434,7 +439,8 @@ jobs } ] }, - "subsystem": "STL1" + "subsystem": "STL1", + "svc_class": "?" } ] @@ -588,6 +594,41 @@ jobs + job_class + Letter indicating job class for this job. + + | **type**: str + | **sample**: A + + svc_class + Character indicating service class for this job. + + | **type**: str + | **sample**: C + + priority + A numeric indicator of the job priority assigned through JES. + + | **type**: int + | **sample**: 4 + + asid + An identifier created by JES. + + | **type**: int + + creation_datetime + Date and time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 20230504T141500 + + queue_position + Integer of the position within the job queue where this job resided. 
+ + | **type**: int + | **sample**: 3 + message This option is being deprecated From 84060ffa374312aa2395cdcc5725c4b74c77ee3b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 2 Jun 2023 22:07:57 -0700 Subject: [PATCH 115/413] Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/791-doc-zos_data_set-member-update.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 changelogs/fragments/791-doc-zos_data_set-member-update.yml diff --git a/changelogs/fragments/791-doc-zos_data_set-member-update.yml b/changelogs/fragments/791-doc-zos_data_set-member-update.yml new file mode 100644 index 000000000..4ab0eee03 --- /dev/null +++ b/changelogs/fragments/791-doc-zos_data_set-member-update.yml @@ -0,0 +1,5 @@ +trivial: +- zos_data_set - when a member is created by the module, the format is type + data which is not suitable for executables. This change describes the + format used when creating member. + (https://github.com/ansible-collections/ibm_zos_core/pull/791) \ No newline at end of file From 3ab9d7821b3510967683fc18a4141b816e343b0d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 6 Jun 2023 01:04:21 -0700 Subject: [PATCH 116/413] Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 72 ++++++++++++++++++++++++++------ 1 file changed, 60 insertions(+), 12 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 8295a6541..d2781c0d5 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -429,17 +429,60 @@ def delete_uncataloged_dataset(name, volumes): Returns: bool -- Return code from the mvs_cmd, if 0 then it was successful. """ - # if is VSAM is_vsam(name, volumes) - vsam_code = 'NVR' - vsam_name_extension = '' + + # NVR specifies that the object to be deleted is an SMS-managed non-VSAM + # volume record (NVR) entry. 
This parameter must be specified to delete + # an NVR from a VSAM volume data set (VVDS) and its corresponding record + # from the VTOC. The NVR/VTOC entries are deleted only if the related + # non-VSAM object catalog entry does not exist. + + # VVR specifies that the objects to be deleted are one or more unrelated + # VSAM volume record (VVR) entries. To delete a VVR from both the VSAM + # volume data set (VVDS) and from the VTOC, you must specify this parameter. + + # To delete a VSAM DS that is not cataloged you must delete each VSAM record + # for that VSAM type and use VVR and FILE. You can simulate an uncataloged DS + # with commands: + # - echo " DELETE IBMUSER.VSAM.KSDS CLUSTER NOSCRATCH NOPURGE" | + # mvscmdauth --pgm=IDCAMS --sysprint=* --sysin=stdin + # echo " DELETE IBMUSER.DATASET NOSCRATCH" | mvscmdauth --pgm=IDCAMS + # --sysprint=* --sysin=stdin + if DataSet.is_vsam(name, volumes): - vsam_code = 'VVR' + vol_record_entry = 'VVR' + # Delete the DATA record of a VSAM, applies to KSDS, RRDS, ESDS, LDS vsam_name_extension = '.DATA' - command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vsam_code) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - if rc > 0: - raise DatasetDeleteError(name, rc) + command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) + dds = dict(DD1=',vol,'.join(volumes) + ',vol') + rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) + # RC 8 occurs when the VSAM Record does not exist, thus acceptable + if rc > 8: + raise DatasetDeleteError(name, rc) + + # Delete the INDEX record of a VSAM, this does NOT apply to RRDS, ESDS, LDS but + # the VSAM is not in catalog so we can't detect the type of VSAM so we + # can expect an RC 8 to appear for non KSDS types. 
+ vsam_name_extension = '.INDEX' + command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) + dds = dict(DD1=',vol,'.join(volumes) + ',vol') + rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) + # RC 8 occurs when the VSAM Record does not exist, thus acceptable + if rc > 8: + raise DatasetDeleteError(name, rc) + else: + vol_record_entry = 'NVR' + command = " DELETE {0} FILE(DD1) {1}".format(name, vol_record_entry) + dds = dict(DD1=',vol,'.join(volumes) + ',vol') + rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) + # RC 8 occurs when the VSAM Record does not exist, thus acceptable + if rc > 8: + raise DatasetDeleteError(name, rc) + + # Callers expect a RC 0 to evaluate if there was a change, so normalize + # to rc 0 + if rc <= 8: + rc = 0 + return rc @staticmethod @@ -464,7 +507,8 @@ def data_set_shared_members(src, dest): @staticmethod def attempt_to_delete_uncataloged_data_set_if_necessary(name, volumes): - """Attempt to delete any uncataloged dataset if exists on any volume and there is a cataloged dataset with the same name. + """Attempt to delete any uncataloged dataset if it exists on any user provided volumes + and there is a cataloged dataset with the same name. Arguments: name (str) -- The data set name to check if cataloged. volumes (list[str]) -- The volumes the data set may reside on. @@ -481,9 +525,13 @@ def attempt_to_delete_uncataloged_data_set_if_necessary(name, volumes): cataloged_volume_list = DataSet.get_volume_list_for_cataloged_data_set(name) if len(cataloged_volume_list) == 0: return changed, present, True - # If any volume provided is not in the list, means we need to delete it from uncataloged dataset. + + # If a volume provided (volumes) is not in the list cataloged_volume_list, we need to + # delete them from the cataloged_volume_list, this leaves us with uncataloged data sets that + # correspond to the volumes argument. 
volumes_for_uncataloged_dataset = list(filter(lambda vol: vol not in cataloged_volume_list, volumes)) - # If any volume provided is in the list we will delete from the catalog as normal. + + # If any volume provided (volumes) is in the list we will delete from catalog as normal. pending_to_delete_cataloged_dataset = any(vol in volumes for vol in cataloged_volume_list) if len(volumes_for_uncataloged_dataset) > 0: From 6b02d2a82a062458fddd0f5ffcbbb25961c86792 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 6 Jun 2023 09:00:23 -0600 Subject: [PATCH 117/413] Encode files recursively and test case for keep behavior. (#772) * Bring the jinja2 solution to dev and add test case * Add fragment * Solve problem z/OS 2.5 HFS * Declaration error solve * Need to check the validation with HFS * Ensure validating z/OS work with HFS * Change inecesary changes and fragments q * Return all test cases to normal * Return all test cases to normal * Create the local test case * Add local test case and change test case to be acurate * Get better cleanup of test-case * Update test_zos_data_set_func.py Equalize test mount func --- ...sively-and-test-case-for-keep-behavior.yml | 5 ++ plugins/modules/zos_copy.py | 18 ++-- .../functional/modules/test_zos_copy_func.py | 84 ++++++++++++++++++- 3 files changed, 96 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml diff --git a/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml b/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml new file mode 100644 index 000000000..672c454b7 --- /dev/null +++ b/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - Zos_copy did not encode inner content inside subdirectories once the 
source was copied to the destination. + Fix now encodes all content in a source directory, including + subdirectories. + (https://github.com/ansible-collections/ibm_zos_core/pull/772). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 739c0d8d0..e5df77787 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -866,17 +866,17 @@ def _convert_encoding_dir(self, dir_path, from_code_set, to_code_set): EncodingConversionError -- When the encoding of a USS file is not able to be converted """ - path, dirs, files = next(os.walk(dir_path)) enc_utils = encode.EncodeUtils() - for file in files: - full_file_path = path + "/" + file - rc = enc_utils.uss_convert_encoding( - full_file_path, full_file_path, from_code_set, to_code_set - ) - if not rc: - raise EncodingConversionError( - full_file_path, from_code_set, to_code_set + for path, dirs, files in os.walk(dir_path): + for file_path in files: + full_file_path = os.path.join(path, file_path) + rc = enc_utils.uss_convert_encoding( + full_file_path, full_file_path, from_code_set, to_code_set ) + if not rc: + raise EncodingConversionError( + full_file_path, from_code_set, to_code_set + ) def _tag_file_encoding(self, file_path, tag, is_dir=False): """Tag the file specified by 'file_path' with the given code set. 
diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 97ec099dc..781ec80bc 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -29,8 +29,7 @@ DUMMY DATA ---- LINE 004 ------ DUMMY DATA ---- LINE 005 ------ DUMMY DATA ---- LINE 006 ------ -DUMMY DATA ---- LINE 007 ------ -""" +DUMMY DATA ---- LINE 007 ------""" DUMMY_DATA_SPECIAL_CHARS = """DUMMY DATA ---- LINE 001 ------ DUMMY DATA ---- LINE ÁÁÁ------ @@ -468,6 +467,87 @@ def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): hosts.all.file(path=dest_dir, state="absent") +@pytest.mark.uss +def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + text_outer_file = "Hi I am point A" + text_inner_file = "Hi I am point B" + src_path = "/tmp/level_1/" + outer_file = "/tmp/level_1/text_A.txt" + inner_src_path = "/tmp/level_1/level_2/" + inner_file = "/tmp/level_1/level_2/text_B.txt" + + try: + hosts.all.file(path=inner_src_path, state="directory") + hosts.all.file(path=inner_file, state = "touch") + hosts.all.file(path=outer_file, state = "touch") + hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_outer_file, outer_file)) + hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_inner_file, inner_file)) + + copy_res = hosts.all.zos_copy(src=src_path, dest=dest_path, encoding={"from": "ISO8859-1", "to": "IBM-1047"}, remote_src=True) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + stat_res = hosts.all.stat(path="/tmp/test/level_2/") + for st in stat_res.contacted.values(): + assert st.get("stat").get("exists") is True + + full_inner_path = dest_path + "/level_2/text_B.txt" + full_outer_path = dest_path + "/text_A.txt" + inner_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_inner_path)) + 
outer_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_outer_path)) + for text in outer_file_text_aft_encoding.contacted.values(): + text_outer = text.get("stdout") + for text in inner_file_text_aft_encoding.contacted.values(): + text_inner = text.get("stdout") + + assert text_inner == text_inner_file + assert text_outer == text_outer_file + finally: + hosts.all.file(path=src_path, state="absent") + hosts.all.file(path=dest_path, state="absent") + + +@pytest.mark.uss +def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + + try: + source_1 = tempfile.TemporaryDirectory(prefix="level_", suffix="_1") + source = source_1.name + source_2 = tempfile.TemporaryDirectory(dir = source, prefix="level_", suffix="_2") + full_source = source_2.name + populate_dir(source) + populate_dir(full_source) + level_1 = os.path.basename(source) + level_2 = os.path.basename(full_source) + + copy_res = hosts.all.zos_copy(src=source, dest=dest_path, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + full_outer_file= "{0}/{1}/file3".format(dest_path, level_1) + full_iner_file= "{0}/{1}/{2}/file3".format(dest_path, level_1, level_2) + verify_copy_1 = hosts.all.shell(cmd="cat {0}".format(full_outer_file)) + verify_copy_2 = hosts.all.shell(cmd="cat {0}".format(full_iner_file)) + + for result in verify_copy_1.contacted.values(): + print(result) + assert result.get("stdout") == DUMMY_DATA + for result in verify_copy_2.contacted.values(): + print(result) + assert result.get("stdout") == DUMMY_DATA + finally: + hosts.all.file(name=dest_path, state="absent") + source_1.cleanup(ignore_cleanup_errors = True) + + @pytest.mark.uss @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): From 
10ab418addd70167da31559d3b0aa592660bad8a Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 6 Jun 2023 13:41:46 -0700 Subject: [PATCH 118/413] Update ac to support a single test (#793) * Update ac to support a single test Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update test description Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 42 ++++++++++++------- .../fragments/789-ac-command-add-test.yml | 3 ++ 2 files changed, 31 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/789-ac-command-add-test.yml diff --git a/ac b/ac index b01fa8bf8..aeb122423 100755 --- a/ac +++ b/ac @@ -188,6 +188,9 @@ option_processor(){ fi } +# If option_processor echoes an exit, the sanitize will execute it else it will +# just be an echo, might be worth seeing if this can just be called or embedded +# into the option_processor to simplify the calls option_sanitize(){ option_value=$1 $option_value 2> /dev/null @@ -317,9 +320,12 @@ ac_sanity(){ ## zoau - ZOAU to use in testing, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1, ## no selection defaults to 1.1.1 . ## file - the absoulte path to a test suite to run, no selection -## defaults to all tests running. +## defaults to all test suites running. +## test - a test case to run found in 'file', no selection +## defaults to all tests in file running. 
## debug - enable debug for pytest (-s), choices are true and false ## Example: +## $ ac --ac-test --host ec01150a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --test test_zos_operator_positive_path --debug true ## $ ac --ac-test --host ec33012a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test @@ -328,7 +334,18 @@ ac_test(){ python=$2 zoau=$3 file=$4 - debug=$5 + test=$5 + debug=$6 + + # Run test by node IDs, eg pytest -v tests/my-directory/test_demo.py::test_specific_function + if [ "$file" ] && [ "$test" ]; then + file="${file}::${test}" + fi + + if [ "$debug" ]; then + debug="-s" + fi + skip=$CURR_DIR/tests/functional/modules/test_module_security.py # Create the config always overwriting existing @@ -343,20 +360,12 @@ ac_test(){ #cd ${VENV_BIN} if [ "$file" ]; then - if [ "$debug" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s - else - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml - fi + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml "${debug}" else for file in `ls tests/functional/modules/*.py`; do - # For some reason '--ignor'e not being honored so injecting a work around + # For some reason '--ignore not being honored so injecting a work around if [ "$file" != "$skip" ]; then - if [ "$debug" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s - else - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml - fi + . 
${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml "${debug}" fi done fi @@ -722,6 +731,11 @@ while true; do option_sanitize $python shift ;; + --test|--test=?*) # option + test=`option_processor $1 $2` + option_sanitize $test + shift + ;; # --tests|--tests=?*) # option # tests=`option_processor $1 $2` # option_sanitize $tests @@ -771,7 +785,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then ac_sanity $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then - ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${debug:=""} + ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${test:=""} ${debug:=""} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test-config" ] ; then ac_test_config elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-version" ] ; then diff --git a/changelogs/fragments/789-ac-command-add-test.yml b/changelogs/fragments/789-ac-command-add-test.yml new file mode 100644 index 000000000..56cae6936 --- /dev/null +++ b/changelogs/fragments/789-ac-command-add-test.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Adds support to run single test from test suite. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/793) \ No newline at end of file From 98c70475ae959caae753bde9282500982a7340d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 8 Jun 2023 09:25:05 -0600 Subject: [PATCH 119/413] Return the dynamically created destination attributes (#773) * First iteration to get dynamic values * Spaces and lines rectified * Add validation and extra variable to ensure consistency * Whitespaces * Change imports in test_zos_mount_func * Update test_zos_fetch_func imports * Update all imports for pipelines runs * Revert "Update all imports for pipelines runs" This reverts commit 1b370a2ba3c0001c316e0121ddab82ae7cc6d75d. Return one commit * Update data_set.py imports * Revert "Update data_set.py imports" This reverts commit 37561b0a12e04faaee8307a5541b71469dbe721d. * Update data_set imports * Update data_set imports * Update data_set imports * Restore import * Restore the imports * Add fragment * Solve a typo * Solve z/OS 2.5 HFS * Solve declaration error * Solve HFS and solution by now * Ensure HFS working with HFS * Better working on HFS testing problems * Change to cover many cases and add test * Modified changelog, corrected typos and shortemed file name * Delete 773-Return-the-dynamically-created-destintation-attributres.yaml * Update test_zos_data_set_func.py * Add documentation * Adjust spaces * Solve spaces in documentation * Solve problems on spaces in documentation * Adjust fragment and add validation for vsams * Better redaction to documentation * Solve spaces * Change documentation of code and collection * Change words in documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...return-dynamically-created-dest-attrs.yaml | 6 + plugins/action/zos_copy.py | 7 +- plugins/modules/zos_copy.py | 109 +++++++++++++++++- .../functional/modules/test_zos_copy_func.py | 7 ++ 4 files 
changed, 124 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml diff --git a/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml b/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml new file mode 100644 index 000000000..0a8ce0adb --- /dev/null +++ b/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml @@ -0,0 +1,6 @@ +minor_changes: +- zos_copy - Adds block_size, record_format, record_length, space_primary, + space_secondary, space_type and type in the return output when + the destination data set does not exist and has to be created + by the module. + (https://github.com/ansible-collections/ibm_zos_core/pull/773) \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 6847b9ac5..5fa861b61 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -344,6 +344,7 @@ def _update_result(is_binary, copy_res, original_args): src = copy_res.get("src") note = copy_res.get("note") backup_name = copy_res.get("backup_name") + dest_data_set_attrs = copy_res.get("dest_data_set_attrs") updated_result = dict( dest=copy_res.get("dest"), is_binary=is_binary, @@ -356,7 +357,6 @@ def _update_result(is_binary, copy_res, original_args): updated_result["note"] = note if backup_name: updated_result["backup_name"] = backup_name - if ds_type == "USS": updated_result.update( dict( @@ -372,6 +372,11 @@ def _update_result(is_binary, copy_res, original_args): checksum = copy_res.get("checksum") if checksum: updated_result["checksum"] = checksum + if dest_data_set_attrs is not None: + if len(dest_data_set_attrs) > 0: + dest_data_set_attrs.pop("name") + updated_result["dest_created"] = True + updated_result["destination_attributes"] = dest_data_set_attrs return updated_result diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index e5df77787..ffb9ccbe4 100644 --- a/plugins/modules/zos_copy.py +++ 
b/plugins/modules/zos_copy.py @@ -563,6 +563,61 @@ returned: success type: str sample: SAMPLE.SEQ.DATA.SET +dest_created: + description: Indicates whether the module created the destination. + returned: success and if dest was created by the module. + type: bool + sample: true +destination_attributes: + description: Attributes of a dest created by the module. + returned: success and destination was created by the module. + type: dict + contains: + block_size: + description: + Block size of the dataset. + type: int + sample: 32760 + record_format: + description: + Record format of the dataset. + type: str + sample: FB + record_length: + description: + Record length of the dataset. + type: int + sample: 45 + space_primary: + description: + Allocated primary space for the dataset. + type: int + sample: 2 + space_secondary: + description: + Allocated secondary space for the dataset. + type: int + sample: 1 + space_type: + description: + Unit of measurement for space. + type: str + sample: K + type: + description: + Type of dataset allocated. + type: str + sample: PDSE + sample: + { + "block_size": 32760, + "record_format": "FB", + "record_length": 45, + "space_primary": 2, + "space_secondary": 1, + "space_type": "K", + "type": "PDSE" + } checksum: description: SHA256 checksum of the file after running zos_copy. returned: C(validate) is C(true) and if dest is USS @@ -1977,6 +2032,45 @@ def is_member_wildcard(src): ) +def get_attributes_of_any_dataset_created( + dest, + src_ds_type, + src, + src_name, + is_binary, + volume=None +): + """ + Get the attributes of dataset created by the function allocate_destination_data_set + except for VSAM. + + Arguments: + dest (str) -- Name of the destination data set. + src_ds_type (str) -- Source of the destination data set. + src (str) -- Name of the source data set, used as a model when appropiate. + src_name (str) -- Extraction of the source name without the member pattern. 
+ is_binary (bool) -- Whether the data set will contain binary data. + volume (str, optional) -- Volume where the data set should be allocated into. + + Returns: + params (dict) -- Parameters used for the dataset created as name, type, + space_primary, space_secondary, record_format, record_length, block_size and space_type + """ + params = {} + if src_ds_type == "USS": + if os.path.isfile(src): + size = os.stat(src).st_size + params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + else: + size = os.path.getsize(src) + params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + else: + src_attributes = datasets.listing(src_name)[0] + size = int(src_attributes.total_space) + params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + return params + + def allocate_destination_data_set( src, dest, @@ -2006,6 +2100,9 @@ def allocate_destination_data_set( Returns: bool -- True if the data set was created, False otherwise. + dest_params (dict) -- Parameters used for the dataset created as name, + block_size, record_format, record_length, space_primary, space_secondary, + space_type, type. """ src_name = data_set.extract_dsname(src) is_dest_empty = data_set.DataSet.is_empty(dest) if dest_exists else True @@ -2014,8 +2111,11 @@ def allocate_destination_data_set( # empty dataset was created for the user by an admin/operator, and they don't have permissions # to create new datasets. # These rules assume that source and destination types are compatible. + # Create the dict that will contains the values created by the module if it's empty action module will + # not display the content. + dest_params = {} if dest_exists and is_dest_empty: - return False + return False, dest_params # Giving more priority to the parameters given by the user. 
if dest_data_set: @@ -2086,8 +2186,9 @@ def allocate_destination_data_set( volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) - - return True + if dest_ds_type not in data_set.DataSet.MVS_VSAM: + dest_params = get_attributes_of_any_dataset_created(dest, src_ds_type, src, src_name, is_binary, volume) + return True, dest_params def normalize_line_endings(src, encoding=None): @@ -2449,7 +2550,7 @@ def run_module(module, arg_def): try: if not is_uss: - res_args["changed"] = allocate_destination_data_set( + res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set( temp_path or src, dest_name, src_ds_type, dest_ds_type, diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 781ec80bc..374bf2b47 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1295,6 +1295,7 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True assert cp_res.get("is_binary") == src["is_binary"] for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 @@ -1467,6 +1468,7 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -1816,6 +1818,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_path + assert cp_res.get("dest_created") is True for v_cp in 
verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -1844,6 +1847,7 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 finally: @@ -1875,6 +1879,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert len(result.get("stdout_lines")) == 2 @@ -1954,6 +1959,7 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -2415,6 +2421,7 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" From b121c38e09e6016b48fd1ec652c1c27476c5c4ad Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 15:34:03 -0700 Subject: [PATCH 120/413] Updated ac command to clean up the collections directory Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ac b/ac index b01fa8bf8..45e0cf1ec 100755 --- a/ac +++ b/ac @@ -361,6 +361,9 @@ ac_test(){ done fi + # Clean up the collections folder after running the tests, temporary work around. 
+ rm -rf collections/ansible_collections + #cd ${CURR_DIR} } From f94eb0c23850027328e4d20f07a224ab76b07c0b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 15:48:40 -0700 Subject: [PATCH 121/413] Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 338 ++++++++++-------- .../modules/test_zos_data_set_func.py | 83 +++-- 2 files changed, 227 insertions(+), 194 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index d2781c0d5..f1aa1ee22 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -13,6 +13,7 @@ __metaclass__ = type +import pprint import re import tempfile from os import path, walk @@ -68,25 +69,18 @@ class DataSet(object): } _VSAM_CATALOG_COMMAND_NOT_INDEXED = """ DEFINE CLUSTER - - (NAME('{0}') - - VOLUMES({1} - - ) - - RECATALOG - - {2}) - - DATA( - - NAME('{0}.DATA')) + (NAME('{0}') - + VOLUMES({1}) - + RECATALOG {2}) - + DATA(NAME('{0}.DATA')) """ _VSAM_CATALOG_COMMAND_INDEXED = """ DEFINE CLUSTER - - (NAME('{0}') - - VOLUMES({1} - - ) - - RECATALOG - - {2}) - - DATA( - - NAME('{0}.DATA')) - - INDEX( - - NAME('{0}.INDEX')) + (NAME('{0}') - + VOLUMES({1}) - + RECATALOG {2}) - + DATA(NAME('{0}.DATA')) - + INDEX(NAME('{0}.INDEX')) """ _NON_VSAM_UNCATALOG_COMMAND = " UNCATLG DSNAME={0}" @@ -214,22 +208,14 @@ def ensure_absent(name, volumes=None): name (str) -- The name of the data set to ensure is absent. volumes (list[str]) -- The volumes the data set may reside on. Returns: - bool -- Indicates if changes were made. + changed (bool) -- Indicates if changes were made. 
""" - if volumes: - changed, present, pending_to_catalog_and_delete = DataSet.attempt_to_delete_uncataloged_data_set_if_necessary( - name, volumes) - if not pending_to_catalog_and_delete: - return changed - present, changed = DataSet.attempt_catalog_if_necessary(name, volumes) - if present: - DataSet.delete(name) - return True - return False + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) - # ? should we do additional check to ensure member was actually created? + return changed + # ? should we do additional check to ensure member was actually created? @staticmethod def ensure_member_present(name, replace=False): """Creates data set member if it does not already exist. @@ -270,7 +256,7 @@ def ensure_cataloged(name, volumes): Returns: bool -- If changes were made. """ - if DataSet.data_set_cataloged(name): + if DataSet.data_set_cataloged(name, None): return False try: DataSet.catalog(name, volumes) @@ -345,7 +331,7 @@ def allocate_model_data_set(ds_name, model, vol=None): raise MVSCmdExecError(rc, out, err) @staticmethod - def data_set_cataloged(name): + def data_set_cataloged(name, volumes=None): """Determine if a data set is in catalog. Arguments: @@ -354,18 +340,26 @@ def data_set_cataloged(name): Returns: bool -- If data is is cataloged. 
""" + name = name.upper() module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) rc, stdout, stderr = module.run_command( "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin ) - if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): - return True + + if volumes: + cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] + if bool(set(volumes) & set (cataloged_volume_list)): + return True + else: + if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): + return True + return False @staticmethod - def get_volume_list_for_cataloged_data_set(name): + def data_set_cataloged_volume_list(name): """Get the volume list for a cataloged dataset name. Arguments: name (str) -- The data set name to check if cataloged. @@ -380,8 +374,8 @@ def get_volume_list_for_cataloged_data_set(name): ) delimiter = 'VOLSER------------' arr = stdout.split(delimiter) - # If a volume serial is not always of lenght 6 we could use ":x.find(' ')" here instead of that index. - volume_list = [x[:x.find(' ')] for x in arr[1:]] + # A volume serial (VOLSER) is not always of fixed length, use ":x.find(' ')" here instead of arr[index]. + volume_list = list(set([x[:x.find(' ')] for x in arr[1:]])) return volume_list @staticmethod @@ -420,71 +414,6 @@ def data_set_member_exists(name): return False return True - @staticmethod - def delete_uncataloged_dataset(name, volumes): - """Delete an uncataloged dataset by specifying volumes. - Arguments: - name (str) -- The data set name to check if cataloged. - volumes (list[str]) -- The volumes the data set may reside on. - Returns: - bool -- Return code from the mvs_cmd, if 0 then it was successful. - """ - - # NVR specifies that the object to be deleted is an SMS-managed non-VSAM - # volume record (NVR) entry. This parameter must be specified to delete - # an NVR from a VSAM volume data set (VVDS) and its corresponding record - # from the VTOC. 
The NVR/VTOC entries are deleted only if the related - # non-VSAM object catalog entry does not exist. - - # VVR specifies that the objects to be deleted are one or more unrelated - # VSAM volume record (VVR) entries. To delete a VVR from both the VSAM - # volume data set (VVDS) and from the VTOC, you must specify this parameter. - - # To delete a VSAM DS that is not cataloged you must delete each VSAM record - # for that VSAM type and use VVR and FILE. You can simulate a uncataloged DS - # with commands: - # - echo " DELETE IBMUSER.VSAM.KSDS CLUSTER NOSCRATCH NOPURGE" | - # mvscmdauth --pgm=IDCAMS --sysprint=* --sysin=stdin - # echo " DELETE IBMUSER.DATASET NOSCRATCH" | mvscmdauth --pgm=IDCAMS - # --sysprint=* --sysin=stdin - - if DataSet.is_vsam(name, volumes): - vol_record_entry = 'VVR' - # Delete the DATA record of a VSAM, applies to KSDS, RRDS, ESDS, LDS - vsam_name_extension = '.DATA' - command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - # RC 8 occurs when the VSAM Record does not exist, thus acceptable - if rc > 8: - raise DatasetDeleteError(name, rc) - - # Delete the INDEX record of a VSAM, this does NOT apply to RRDS, ESDS, LDS but - # the VASAM is not in catalog so we can't detect the type of VSAM so we - # can expect an RC 8 to appear for non KSDS types. 
- vsam_name_extension = '.INDEX' - command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - # RC 8 occurs when the VSAM Record does exist, thus acceptable - if rc > 8: - raise DatasetDeleteError(name, rc) - else: - vol_record_entry = 'NVR' - command = " DELETE {0} FILE(DD1) {1}".format(name, vol_record_entry) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - # RC 8 occurs when the VSAM Record does not exist, thus acceptable - if rc > 8: - raise DatasetDeleteError(name, rc) - - # Callers expect a RC 0 to evaluate if there was a change, so normalize - # to rc 0 - if rc <= 8: - rc = 0 - - return rc - @staticmethod def data_set_shared_members(src, dest): """Checks for the existence of members from a source data set in @@ -505,45 +434,6 @@ def data_set_shared_members(src, dest): return False - @staticmethod - def attempt_to_delete_uncataloged_data_set_if_necessary(name, volumes): - """Attempt to delete any uncataloged dataset if exists on any user provided volumes - and there is a cataloged dataset with the same name. - Arguments: - name (str) -- The data set name to check if cataloged. - volumes (list[str]) -- The volumes the data set may reside on. - Returns: - bool -- If any action was performed on the data. - bool -- If the dataset is still present. - bool -- If given the volumes list and dataset name we need to continue with deleting the dataset as usual, - either by cataloging it and deleting or deleting a cataloged dataset. - """ - changed = False - present = True - pending_to_delete_cataloged_dataset = False - # Get the list of volumes that the dataset is catalogued in. 
- cataloged_volume_list = DataSet.get_volume_list_for_cataloged_data_set(name) - if len(cataloged_volume_list) == 0: - return changed, present, True - - # If a volume provided (volumes) is not in the list cataloged_volume_list, we need to - # delete them from the cataloged_volume_list, this leaves us with with uncataloged data sets that - # correspond to the volumes argument. - volumes_for_uncataloged_dataset = list(filter(lambda vol: vol not in cataloged_volume_list, volumes)) - - # If any volume provided (volumes) is in the list we will delete from catalog as normal. - pending_to_delete_cataloged_dataset = any(vol in volumes for vol in cataloged_volume_list) - - if len(volumes_for_uncataloged_dataset) > 0: - volumes = list(filter(lambda vol: DataSet._is_in_vtoc(name, vol), volumes)) - if len(volumes) > 0: - present = DataSet.delete_uncataloged_dataset(name, volumes) - changed = present == 0 - else: - changed = False - - return changed, present, pending_to_delete_cataloged_dataset - @staticmethod def get_member_name_from_file(file_name): """Creates a member name for a partitioned data set by taking up to the @@ -623,7 +513,7 @@ def data_set_volume(name): @staticmethod def data_set_type(name, volume=None): - """Checks the type of a data set. + """Checks the type of a data set, data sets must be cataloged. Arguments: name (str) -- The name of the data set. @@ -783,6 +673,114 @@ def attempt_catalog_if_necessary(name, volumes): present = True return present, changed + @staticmethod + def attempt_catalog_if_necessary_and_delete(name, volumes): + """Attempts to catalog a data set if not already cataloged, then deletes + the data set. + This is helpful when a data set currently cataloged is not the data + set needing to be deleted, meaning the one in the provided volumes + is needing to be deleted.. Recall, you can have a data set in + two different volumes, and only one cataloged. + + Arguments: + name (str) -- The name of the data set. 
+ volumes (list[str]) -- The volumes the data set may reside on. + + Returns: + changed (bool) -- Whether changes were made. + present (bool) -- Whether the data set is now present. + """ + + changed = False + present = True + + if volumes: + # Check if the data set is cataloged + present = DataSet.data_set_cataloged(name) + + if present: + # Data set is cataloged, now check it its cataloged on the provided volumes + # If it is, we just delete because the DS is the right one wanting deletion. + present = DataSet.data_set_cataloged(name, volumes) + + if present: + DataSet.delete(name) + changed = True + present = False + else: + # It appears that what is in catalog does not match the provided + # volumes, therefore the user wishes we delete a data set on a + # particular volue, NOT what is in catalog. + # for the provided volumes + + # We need to identify the volumes where the current cataloged data set + # is located for use later when we recatalog. Code is strategically + # placed before the uncatalog. 
+ cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name) + + try: + DataSet.uncatalog(name) + except DatasetUncatalogError: + return changed, present + + # Catalog the data set for the provided volumes + try: + DataSet.catalog(name, volumes) + except DatasetCatalogError: + try: + # A failure, so recatalog the original data set on the original volumes + DataSet.catalog(name,cataloged_volume_list_original) + except DatasetCatalogError: + pass + return changed, present + + # Check the recatalog, ensure it cataloged before we try to remove + present = DataSet.data_set_cataloged(name, volumes) + + if present: + try: + DataSet.delete(name) + except DatasetDeleteError: + try: + DataSet.uncatalog(name) + except DatasetUncatalogError: + try: + DataSet.catalog(name,cataloged_volume_list_original) + except DatasetCatalogError: + pass + return changed, present + try: + DataSet.catalog(name,cataloged_volume_list_original) + changed = True + present = False + except DatasetCatalogError: + changed = True + present = False + return changed, present + else: + try: + DataSet.catalog(name, volumes) + except DatasetCatalogError: + return changed, present + + present = DataSet.data_set_cataloged(name, volumes) + + if present: + DataSet.delete(name) + changed = True + present = False + else: + present = DataSet.data_set_cataloged(name, None) + if present: + try: + DataSet.delete(name) + changed = True + present = False + except DatasetDeleteError: + return changed, present + + return changed, present + @staticmethod def _is_in_vtoc(name, volume): """Determines if data set is in a volume's table of contents. 
@@ -1115,25 +1113,53 @@ def _catalog_vsam(name, volumes): data_set_name = name.upper() success = False command_rc = 0 - for data_set_type in ["", "LINEAR", "INDEXED", "NONINDEXED", "NUMBERED"]: - if data_set_type != "INDEXED": - command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( - data_set_name, - DataSet._build_volume_string_idcams(volumes), - data_set_type, - ) - else: - command = DataSet._VSAM_CATALOG_COMMAND_INDEXED.format( - data_set_name, - DataSet._build_volume_string_idcams(volumes), - data_set_type, - ) - command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command + command ="" + + # In order to catalog a uncataloged data set, we can't rely on LISTCAT + # so using the VTOC entries we can make some assumptions of if the data set + # is indexed, linear etc. + ds_vtoc_data_entry = vtoc.get_data_set_entry(name+".DATA", volumes[0]) + ds_vtoc_index_entry = vtoc.get_data_set_entry(name+".INDEX", volumes[0]) + + if ds_vtoc_data_entry and ds_vtoc_index_entry: + data_set_type_vsam = "INDEXED" + else: + data_set_type_vsam = "NONINDEXED" + + if data_set_type_vsam != "INDEXED": + command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( + data_set_name, + DataSet._build_volume_string_idcams(volumes), + data_set_type_vsam, + ) + else: + command = DataSet._VSAM_CATALOG_COMMAND_INDEXED.format( + data_set_name, + DataSet._build_volume_string_idcams(volumes), + data_set_type_vsam, + ) + + command_rc, stdout, stderr = module.run_command( + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command) + + if command_rc == 0: + success = True + # break + + if not success: + # Liberty taken such that here we can assume its a LINEAR VSAM + command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( + data_set_name, + DataSet._build_volume_string_idcams(volumes), + "LINEAR", ) + + command_rc, stdout, stderr = module.run_command( + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command) + if 
command_rc == 0: success = True - break + if not success: raise DatasetCatalogError( name, diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index c4833aa56..2eae7de27 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -34,8 +34,8 @@ ("lds"), ] -DEFAULT_VOLUME = "000000" -DEFAULT_VOLUME2 = "222222" +VOLUME_000000 = "000000" +VOLUME_222222 = "222222" DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" DEFAULT_DATA_SET_NAME_WITH_MEMBER = "USER.PRIVATE.TESTDS(TESTME)" TEMP_PATH = "/tmp/jcl" @@ -140,6 +140,9 @@ def retrieve_data_set_names(results): data_set_names.append(name) return data_set_names +def print_results(results): + for result in results.contacted.values(): + pprint(result) @pytest.mark.parametrize( "jcl", @@ -149,7 +152,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @@ -160,13 +163,10 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): ) # verify data set creation was successful for result in results.contacted.values(): - pprint(result) if(result.get("jobs")[0].get("ret_code") is None): submitted_job_id = result.get("jobs")[0].get("job_id") assert submitted_job_id is not None results = hosts.all.zos_job_output(job_id=submitted_job_id) - print("Getting failed JOB") - pprint(vars(results)) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # verify first uncatalog was performed results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") @@ -178,13 +178,13 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): assert result.get("changed") is False # recatalog the data 
set results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is True # verify second catalog shows catalog already performed results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -192,7 +192,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): # clean up hosts.all.file(path=TEMP_PATH, state="absent") # Added volumes to force a catalog in case they were somehow uncataloged to avoid an duplicate on volume error - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2]) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=[VOLUME_000000, VOLUME_222222]) @pytest.mark.parametrize( @@ -203,9 +203,10 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -216,7 +217,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert 
result.get("changed") is False @@ -226,13 +227,13 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): assert result.get("changed") is True # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=DEFAULT_VOLUME) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) @pytest.mark.parametrize( @@ -243,9 +244,10 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -256,7 +258,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -268,7 +270,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", - volumes=DEFAULT_VOLUME, + volumes=VOLUME_000000, replace=True, ) for result in results.contacted.values(): @@ -286,9 +288,10 @@ def 
test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -303,7 +306,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): assert result.get("changed") is True # ensure data set absent results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="absent", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is True @@ -318,40 +321,44 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): ) def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl): hosts = ansible_zos_module - hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME - ) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) - results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True - ) + hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + # verify data set creation was successful for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + # uncatalog the data set results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") for result in results.contacted.values(): assert 
result.get("changed") is True + # Create the same dataset name in different volume - jcl = jcl.replace(DEFAULT_VOLUME, DEFAULT_VOLUME2) + jcl = jcl.replace(VOLUME_000000, VOLUME_222222) + hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) - results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True - ) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + # verify data set creation was successful for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + hosts.all.file(path=TEMP_PATH, state="absent") + # ensure data set absent - results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="absent", volumes=DEFAULT_VOLUME - ) + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) + for result in results.contacted.values(): + assert result.get("changed") is True + + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") for result in results.contacted.values(): assert result.get("changed") is True - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @pytest.mark.parametrize("dstype", data_set_types) @@ -499,7 +506,7 @@ def test_data_member_force_delete(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True - # add members + #add members results = hosts.all.zos_data_set( batch=[ { @@ -672,7 +679,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) space_primary=5, space_type="CYL", record_length=15, - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -686,7 +693,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) results = hosts.all.zos_data_set( 
name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -706,7 +713,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): key_offset=0, space_primary=5, space_type="CYL", - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -720,7 +727,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -738,7 +745,7 @@ def test_data_set_old_aliases(ansible_zos_module): state="present", format="fb", size="5m", - volume=DEFAULT_VOLUME, + volume=VOLUME_000000, ) for result in results.contacted.values(): assert result.get("changed") is True From 9ce6eefd182dc28ebd1b16566fd3786d98bb4b20 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:21:14 -0700 Subject: [PATCH 122/413] Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_data_set.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index dde8f3488..bcf468599 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -310,6 +310,13 @@ found in the catalog, the module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with I(changed=False). 
+ - > + If I(state=absent) and I(volumes) is provided, and the data set is found in + the catalog, the module compares the catalog volume attributes to the provided + I(volumes). If the volume attributes are different, the cataloged data set + will be uncataloged temporarily while the requested data set to be deleted is cataloged. + The module will catalog the original data set on completion; if the attempts to + catalog fail, no action is taken. Module completes successfully with I(changed=False). - > If I(state=present) and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with I(changed=True). From 6c2669f23d3c7ecbd84ae54590ec6ba8802d6cbd Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:24:50 -0700 Subject: [PATCH 123/413] Update module doc to explain data set deletion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_data_set.rst | 6 ++++++ plugins/modules/zos_data_set.py | 7 +++++++ 2 files changed, 13 insertions(+) diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index c310069e8..8415694b0 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -56,6 +56,9 @@ state If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set to be deleted is cataloged. 
The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + + If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. @@ -324,6 +327,9 @@ batch If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + + If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index bcf468599..b279d40d2 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -57,6 +57,13 @@ found in the catalog, the module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with I(changed=False). + - > + If I(state=absent) and I(volumes) is provided, and the data set is found in + the catalog, the module compares the catalog volume attributes to the provided + I(volumes). 
If they volume attributes are different, the cataloged data set + will be uncataloged temporarily while the requested data set be deleted is cataloged. + The module will catalog the original data set on completion, if the attempts to + catalog fail, no action is taken. Module completes successfully with I(changed=False). - > If I(state=present) and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with I(changed=True). From 328dd16bb687fe042bf2981376914ad33b20a39e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:29:40 -0700 Subject: [PATCH 124/413] Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_data_set_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 2eae7de27..63301d2e8 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -327,7 +327,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) - hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) # verify data set creation was successful for result in results.contacted.values(): From d52c2a8996a5043c38a030568fd030eb8d75fa45 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:50:03 -0700 Subject: [PATCH 125/413] added changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --- .../791-doc-zos_data_set-member-update.yml | 5 ----- .../fragments/791-zos_data_set-update-vsam.yml | 17 +++++++++++++++++ 2 files changed, 17 insertions(+), 5 deletions(-) delete mode 100644 
changelogs/fragments/791-doc-zos_data_set-member-update.yml create mode 100644 changelogs/fragments/791-zos_data_set-update-vsam.yml diff --git a/changelogs/fragments/791-doc-zos_data_set-member-update.yml b/changelogs/fragments/791-doc-zos_data_set-member-update.yml deleted file mode 100644 index 4ab0eee03..000000000 --- a/changelogs/fragments/791-doc-zos_data_set-member-update.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: -- zos_data_set - when a member is created by the module, the format is type - data which is not suitable for executables. This change describes the - format used when creating member. - (https://github.com/ansible-collections/ibm_zos_core/pull/791) \ No newline at end of file diff --git a/changelogs/fragments/791-zos_data_set-update-vsam.yml b/changelogs/fragments/791-zos_data_set-update-vsam.yml new file mode 100644 index 000000000..00ec67495 --- /dev/null +++ b/changelogs/fragments/791-zos_data_set-update-vsam.yml @@ -0,0 +1,17 @@ +trivial: +- zos_data_set - when a member is created by the module, the format is type + data which is not suitable for executables. This change describes the + format used when creating member. + (https://github.com/ansible-collections/ibm_zos_core/pull/791) +- ac - Reported an issue when functional tests ran leaving behind files. Fix + now removes the unwanted files. + (https://github.com/ansible-collections/ibm_zos_core/pull/791) +bugfixes: +- zos_copy - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical + flow that will evaluate the volumes, compare it to the provided value and + if necessary catalog and delete. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). +- module_utils - data_set.py - Reported a failure caused when cataloging a + VSAM data set. Fix now corrects how VSAM data sets are cataloged. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). 
\ No newline at end of file From acdd1bbce04593ae9a61387d38e256de972c9ad8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 17:08:06 -0700 Subject: [PATCH 126/413] Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index f1aa1ee22..f1d8152a8 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -210,9 +210,7 @@ def ensure_absent(name, volumes=None): Returns: changed (bool) -- Indicates if changes were made. """ - - changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) - + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) return changed # ? should we do additional check to ensure member was actually created? @@ -350,7 +348,7 @@ def data_set_cataloged(name, volumes=None): if volumes: cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] - if bool(set(volumes) & set (cataloged_volume_list)): + if bool(set(volumes) & set(cataloged_volume_list)): return True else: if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): @@ -729,7 +727,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): except DatasetCatalogError: try: # A failure, so recatalog the original data set on the original volumes - DataSet.catalog(name,cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original) except DatasetCatalogError: pass return changed, present @@ -745,12 +743,12 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): DataSet.uncatalog(name) except DatasetUncatalogError: try: - DataSet.catalog(name,cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original) except DatasetCatalogError: pass return changed, present try: - DataSet.catalog(name,cataloged_volume_list_original) 
+ DataSet.catalog(name, cataloged_volume_list_original) changed = True present = False except DatasetCatalogError: @@ -1113,13 +1111,13 @@ def _catalog_vsam(name, volumes): data_set_name = name.upper() success = False command_rc = 0 - command ="" + command = "" # In order to catalog a uncataloged data set, we can't rely on LISTCAT # so using the VTOC entries we can make some assumptions of if the data set # is indexed, linear etc. - ds_vtoc_data_entry = vtoc.get_data_set_entry(name+".DATA", volumes[0]) - ds_vtoc_index_entry = vtoc.get_data_set_entry(name+".INDEX", volumes[0]) + ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0]) + ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0]) if ds_vtoc_data_entry and ds_vtoc_index_entry: data_set_type_vsam = "INDEXED" From 55dd8710a8bea1bc6a6a0b9a218bb4be21c2e451 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 10 Jun 2023 22:44:42 -0700 Subject: [PATCH 127/413] Update changelog based on PR feedback Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/791-zos_data_set-update-vsam.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/changelogs/fragments/791-zos_data_set-update-vsam.yml b/changelogs/fragments/791-zos_data_set-update-vsam.yml index 00ec67495..c4ad9901c 100644 --- a/changelogs/fragments/791-zos_data_set-update-vsam.yml +++ b/changelogs/fragments/791-zos_data_set-update-vsam.yml @@ -7,11 +7,11 @@ trivial: now removes the unwanted files. (https://github.com/ansible-collections/ibm_zos_core/pull/791) bugfixes: -- zos_copy - Reported a failure caused when `present=absent` for a VSAM +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. - (https://github.com/ansible-collections/ibm_zos_core/pull/762). 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/791). - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. - (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/791). \ No newline at end of file From 6c704574d5da8191234b507cca25dddef4ce68b8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 11 Jun 2023 12:44:54 -0700 Subject: [PATCH 128/413] Increase ansible supported version to 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> --- meta/runtime.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meta/runtime.yml b/meta/runtime.yml index dbba1c7ce..37ae81b1f 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9,<2.15' +requires_ansible: '>=2.9,<2.16' From 923553931c51b0fca14a4a2c585a31991e9a52cf Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 11 Jun 2023 12:45:26 -0700 Subject: [PATCH 129/413] remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index f1d8152a8..647f26871 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -13,7 +13,6 @@ __metaclass__ = type -import pprint import re import tempfile from os import path, walk @@ -40,11 +39,9 @@ vtoc = MissingImport("vtoc") try: - from zoautil_py import datasets, mvscmd, types + from zoautil_py import datasets except ImportError: datasets = MissingZOAUImport() - mvscmd = MissingZOAUImport() - types = MissingZOAUImport() class DataSet(object): From 5f9ef2352967c07869eaaea9efde4e32a71086bd Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 11 Jun 2023 12:50:44 -0700 Subject: [PATCH 
130/413] Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/sanity/ignore-2.16.txt | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 tests/sanity/ignore-2.16.txt diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt new file mode 100644 index 000000000..8b4540038 --- /dev/null +++ b/tests/sanity/ignore-2.16.txt @@ -0,0 +1,35 @@ +plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. +plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. +plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion +plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin +plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. +plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. 
+plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. +plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin +plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin +plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited +plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.py 
validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 494832e7df2458c11cc4800e9db43ddf57cee8ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 12 Jun 2023 16:36:02 -0600 Subject: [PATCH 131/413] Change the line for the functional one (#805) --- tests/functional/modules/test_zos_copy_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 374bf2b47..86d5e7564 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -545,7 +545,7 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ assert result.get("stdout") == DUMMY_DATA finally: hosts.all.file(name=dest_path, state="absent") - source_1.cleanup(ignore_cleanup_errors = True) + source_1.cleanup() @pytest.mark.uss From a9507f6e24655cf191c8442da5abe5348afafb3a Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 16 Jun 2023 13:50:27 -0700 Subject: [PATCH 132/413] Add ansible-lint tooling added (#812) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> * add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * missing hyphen from command doc Signed-off-by: ddimatos <dimatos@gmail.com> 
* Update ac command with doc corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .ansible-lint | 35 +++++++++++++++++++++++ ac | 28 +++++++++++++++--- changelogs/fragments/812-ansible-lint.yml | 4 +++ galaxy.yml | 3 ++ meta/runtime.yml | 2 +- 5 files changed, 67 insertions(+), 5 deletions(-) create mode 100644 .ansible-lint create mode 100644 changelogs/fragments/812-ansible-lint.yml diff --git a/.ansible-lint b/.ansible-lint new file mode 100644 index 000000000..a83129368 --- /dev/null +++ b/.ansible-lint @@ -0,0 +1,35 @@ +exclude_paths: + - .tar.gz + - __pycache__/ + - .cache/ + - .DS_Store + - .git/ + - .github/ + - .gitignore + - .python-version + - .pytest_cache/ + - .vscode/ + - Jenkinsfile + - ac + - ansible.cfg + - changelogs/ + - collections/ + - docs/ + - scripts/ + - test_config.yml + - tests/*.ini + - tests/*.py + - tests/.pytest_cache + - tests/__pycache__ + - tests/functional + - tests/helpers + - tests/requirements.txt + - tests/unit + - tests/sanity/ignore-2.9.txt + - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt + - venv* +parseable: true +quiet: false +use_default_rules: true +verbosity: 1 diff --git a/ac b/ac index 3d0394cac..1d06757f5 100755 --- a/ac +++ b/ac @@ -205,8 +205,8 @@ option_sanitize(){ # ------------------------------------------------------------------------------ #->ac-bandit: ## Run a bandit security scan on the plugins directory, set the severity level. -## Usage: ac [-s <level>, --bandit <level>] -## Usage: ac [-s <level>, --bandit <level>] +## Usage: ac [--bandit <level>] +## Usage: ac [--bandit <level>] ## <level> - choose from 'l', 'll', 'lll' ## - l all low, medium, high severity ## - ll all medium, high severity @@ -230,7 +230,7 @@ ac_bandit(){ # ------------------------------------------------------------------------------ #->ac-build: ## Build and install collection of the local GH branch. 
-## Usage: ac [-b, --ac-build] +## Usage: ac [--ac-build] ## Example: ## $ ac --ac-build ac_build(){ @@ -265,13 +265,27 @@ ac_install(){ fi } +# ------------------------------------------------------------------------------ +# Run ansible-lint on the locally checked out GH Branch +# ------------------------------------------------------------------------------ +#->ac-lint: +## Run ansible-lint on the local GH branch with the production profile. +## Usage: ac [--ac-lint] +## Example: +## $ ac --ac-lint +ac_ansible_lint(){ + gh_branch=`git branch |grep "*" | cut -d" " -f2` + message "Linting with ansible-lint on GH branch: '$gh_branch'." + . $VENV_BIN/activate && $VENV_BIN/ansible-lint --profile production +} + # ------------------------------------------------------------------------------ # Run the sanity test using docker given python version else default to venv # ------------------------------------------------------------------------------ #->ac-sanity: ## Run ansible-test in docker if the docker engine is running, else run them in ## a managed virtual environment using the installed python version. -## Usage: ac [--ac-lint] [--version <version>] +## Usage: ac [--ac-sanity] [--version <version>] ## Options: ## <version> - Only applies to when docker is running. ## - No version selection will run all available python versions in docker. 
@@ -644,6 +658,10 @@ while true; do ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" ;; + --ac-lint) + ensure_managed_venv_exists $1 # Command + option_submitted="--ac-lint" + ;; --ac-sanity |--ac-sanity=?*) # Command ensure_managed_venv_exists $1 option_submitted="--ac-sanity" @@ -785,6 +803,8 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then + ac_ansible_lint elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then ac_sanity $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then diff --git a/changelogs/fragments/812-ansible-lint.yml b/changelogs/fragments/812-ansible-lint.yml new file mode 100644 index 000000000..0cb520884 --- /dev/null +++ b/changelogs/fragments/812-ansible-lint.yml @@ -0,0 +1,4 @@ +trivial: +- ansible-lint - enabling ansible-lint for 2.15 and Ansible Automation Platform + certification. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/812) diff --git a/galaxy.yml b/galaxy.yml index e4b998278..fad6b8f5c 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -90,4 +90,7 @@ build_ignore: - tests/helpers - tests/requirements.txt - tests/unit + - tests/sanity/ignore-2.9.txt + - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt - venv* diff --git a/meta/runtime.yml b/meta/runtime.yml index 37ae81b1f..9f9843979 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9,<2.16' +requires_ansible: '>=2.12.00' From ee0dfd4c820e70ad02319aeb7f11d2985b169ce8 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 23 Jun 2023 13:19:53 -0400 Subject: [PATCH 133/413] 439 addf (#821) * initial changes to support F format * adding F option, added basic test looking for failure during ensure-present * added print_results to a failing uncatalog test. * adding more print_result statements to track down cataloging issue * removed other print statements, added one back (cataloging is just plain finicky) * corrected volume name on new test * removed extra print statement from test code. Added Changelog fragment. * Expanded test case to try 1 of each record format creation. Added mention of 'F' into the documentation of record_format in dataset.py --- changelogs/fragments/439-add-f-recordtype.yml | 4 ++++ plugins/module_utils/data_set.py | 8 +++---- plugins/modules/zos_data_set.py | 3 +++ .../modules/test_zos_data_set_func.py | 21 +++++++++++++++++++ 4 files changed, 32 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/439-add-f-recordtype.yml diff --git a/changelogs/fragments/439-add-f-recordtype.yml b/changelogs/fragments/439-add-f-recordtype.yml new file mode 100644 index 000000000..6c5e72f49 --- /dev/null +++ b/changelogs/fragments/439-add-f-recordtype.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_data_set - record format = 'F' has been added to support 'fixed' block records. 
+ This allows records that can use the entire block. + (https://github.com/ansible-collections/ibm_zos_core/pull/821) \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 647f26871..69e1190f1 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -124,7 +124,7 @@ def ensure_present( space_type (str, optional): The unit of measurement to use when defining primary and secondary space. Defaults to None. record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to None. record_length (int, optional) The length, in bytes, of each record in the data set. Defaults to None. @@ -832,7 +832,7 @@ def replace( space_type (str, optional): The unit of measurement to use when defining primary and secondary space. Defaults to None. record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to None. record_length (int, optional) The length, in bytes, of each record in the data set. Defaults to None. @@ -949,7 +949,7 @@ def create( space_type (str, optional): The unit of measurement to use when defining primary and secondary space. Defaults to None. record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to None. record_length (int, optional) The length, in bytes, of each record in the data set. Defaults to None. @@ -1317,7 +1317,7 @@ def create_temp( Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. Defaults to "SEQ". record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to "FB". 
space_primary (int, optional): The amount of primary space to allocate for the dataset. Defaults to 5. diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index b279d40d2..ca6012978 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -168,6 +168,7 @@ - FBA - VBA - U + - F type: str default: FB sms_storage_class: @@ -428,6 +429,7 @@ - FBA - VBA - U + - F type: str default: FB sms_storage_class: @@ -698,6 +700,7 @@ "FBA", "VBA", "U", + "F", ] DEFAULT_RECORD_LENGTHS = { diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 63301d2e8..0a3972646 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -907,3 +907,24 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): finally: if dsname: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + +@pytest.mark.parametrize( + "formats", + ["F","FB", "VB", "FBA", "VBA", "U"], +) +def test_data_set_f_formats(ansible_zos_module, formats): + try: + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results = hosts.all.zos_data_set( + name=DEFAULT_DATA_SET_NAME, + state="present", + format=formats, + size="5m", + volume=VOLUME_000000, + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From b37ab341176b9c8447cc72c558988d37fb2d5842 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 23 Jun 2023 14:54:54 -0600 Subject: [PATCH 134/413] Bugfix/769/mode option does not behave the same way that it does in the community module (#795) * First suggestion * Add files to be overwriten to the files to be changed * Add 
functionality to test case to ensure behaviour * Add test case for keep behaviour * Delete test repetition * Delete test case from other branch * Change test cases to ensure works as ansible module * Add fragment and change variable names for clarity * Get better test case and comments * Restore test --------- Co-authored-by: Demetri <dimatos@gmail.com> --- .../795_overwrite_permissions_on_copy.yml | 4 ++ plugins/modules/zos_copy.py | 19 +++--- .../functional/modules/test_zos_copy_func.py | 59 +++++++------------ 3 files changed, 35 insertions(+), 47 deletions(-) create mode 100644 changelogs/fragments/795_overwrite_permissions_on_copy.yml diff --git a/changelogs/fragments/795_overwrite_permissions_on_copy.yml b/changelogs/fragments/795_overwrite_permissions_on_copy.yml new file mode 100644 index 000000000..2a8d826d7 --- /dev/null +++ b/changelogs/fragments/795_overwrite_permissions_on_copy.yml @@ -0,0 +1,4 @@ +bugfixes: +- zos_copy - kept permissions on target directory when copy overwrote + files. The fix now sets permissions when mode is given. + (https://github.com/ansible-collections/ibm_zos_core/pull/795) \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ffb9ccbe4..831b2db87 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1241,27 +1241,30 @@ def _get_changed_files(self, src, dest, copy_directory): for the files and directories already present on the destination. """ - copied_files = self._walk_uss_tree(src) + files_to_copy = self._walk_uss_tree(src) # It's not needed to normalize the path because it was already normalized # on _copy_to_dir. 
parent_dir = os.path.basename(src) if copy_directory else '' - changed_files = [] - original_files = [] - for relative_path in copied_files: + files_to_change = [] + existing_files = [] + for relative_path in files_to_copy: if os.path.exists(os.path.join(dest, parent_dir, relative_path)): - original_files.append(relative_path) + existing_files.append(relative_path) else: - changed_files.append(relative_path) + files_to_change.append(relative_path) + # This change adds to the files_to_change variable any file that accord with + # a name found in the source copy. + files_to_change.extend(existing_files) # Creating tuples with (filename, permissions). original_permissions = [ (filepath, os.stat(os.path.join(dest, parent_dir, filepath)).st_mode) - for filepath in original_files + for filepath in existing_files ] - return changed_files, original_permissions + return files_to_change, original_permissions def _walk_uss_tree(self, dir): """Walks the tree directory for dir and returns all relative paths diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 86d5e7564..41efc4ee4 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -17,6 +17,7 @@ import os import shutil import re +import time import tempfile from tempfile import mkstemp @@ -933,7 +934,7 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): for result in stat_overwritten_file_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is False - assert result.get("stat").get("mode") == dest_mode + assert result.get("stat").get("mode") == mode for result in stat_new_file_res.contacted.values(): assert result.get("stat").get("exists") is True @@ -1027,7 +1028,7 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): for result in stat_overwritten_file_res.contacted.values(): assert 
result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is False - assert result.get("stat").get("mode") == dest_mode + assert result.get("stat").get("mode") == mode for result in stat_new_file_res.contacted.values(): assert result.get("stat").get("exists") is True @@ -1110,52 +1111,32 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, src): hosts = ansible_zos_module dest_path = "/tmp/test/" + mode = "750" + other_mode = "744" + mode_overwrite = "0777" + full_path = "{0}/profile".format(dest_path) try: - hosts.all.file(path=dest_path, state="directory", mode="750") - permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) - hosts.all.zos_copy(content=src["src"], dest=dest_path) - permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + hosts.all.file(path=dest_path, state="directory", mode=mode) + permissions_before = hosts.all.stat(path=dest_path) + hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=other_mode) + permissions = hosts.all.stat(path=dest_path) for before in permissions_before.contacted.values(): - permissions_be_copy = before.get("stdout") - - for after in permissions.contacted.values(): - permissions_af_copy = after.get("stdout") - - permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] - permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] - - assert permissions_be_copy == permissions_af_copy - finally: - hosts.all.file(path=dest_path, state="absent") - - -@pytest.mark.uss -@pytest.mark.parametrize("src", [ - dict(src="/etc/", is_remote=False), - dict(src="/etc/", is_remote=True),]) -def test_ensure_copy_directory_does_not_change_permission_on_dest(ansible_zos_module, src): - hosts = ansible_zos_module - dest_path = "/tmp/test/" - try: - hosts.all.file(path=dest_path, state="directory", mode="750") - permissions_before = hosts.all.shell(cmd="ls -la 
{0}".format(dest_path)) - hosts.all.zos_copy(content=src["src"], dest=dest_path) - permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) - - for before in permissions_before.contacted.values(): - permissions_be_copy = before.get("stdout") + permissions_be_copy = before.get("stat").get("mode") for after in permissions.contacted.values(): - permissions_af_copy = after.get("stdout") + permissions_af_copy = after.get("stat").get("mode") - permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] - permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] - assert permissions_be_copy == permissions_af_copy + + # Extra asserts to ensure change mode rewrite a copy + hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=mode_overwrite) + permissions_overwriten = hosts.all.stat(path = full_path) + for over in permissions_overwriten.contacted.values(): + assert over.get("stat").get("mode") == mode_overwrite finally: hosts.all.file(path=dest_path, state="absent") - + @pytest.mark.uss @pytest.mark.seq From 04486401c2e89cf9f2e86d3c2c9afda4211b048e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 27 Jun 2023 11:13:16 -0600 Subject: [PATCH 135/413] bugfix/823/Return destination attributes had hardcoded type and record format (#824) * Add solution * Add fragment --- ...nation_attributes_had_hardcoded_type_and_recordformat.yml | 5 +++++ plugins/modules/zos_copy.py | 4 ++++ 2 files changed, 9 insertions(+) create mode 100644 changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml diff --git a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml new file mode 100644 index 000000000..8f4246f85 --- /dev/null +++ 
b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml @@ -0,0 +1,5 @@ +bugfixes: +- zos_copy - Module returned the dynamic values created with the same dataset type + and record format. Fix validate the correct dataset type and record format of + target created. + (https://github.com/ansible-collections/ibm_zos_core/pull/824) \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 831b2db87..fd7c8feac 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2191,6 +2191,10 @@ def allocate_destination_data_set( data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) if dest_ds_type not in data_set.DataSet.MVS_VSAM: dest_params = get_attributes_of_any_dataset_created(dest, src_ds_type, src, src_name, is_binary, volume) + dest_attributes = datasets.listing(dest)[0] + record_format = dest_attributes.recfm + dest_params["type"] = dest_ds_type + dest_params["record_format"] = record_format return True, dest_params From 5703246635e0deb84d4eba31359b6d5004216da7 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 27 Jun 2023 10:31:17 -0700 Subject: [PATCH 136/413] Bugfix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#806) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> * Update test framework to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos 
<dimatos@gmail.com> * remove verbosity from test Signed-off-by: ddimatos <dimatos@gmail.com> * correct indentation Signed-off-by: ddimatos <dimatos@gmail.com> * update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../791-zos_data_set-update-vsam-copy.yml | 11 ++ ...=> 806-zos_copy_fetch-display-verbose.yml} | 0 plugins/action/zos_copy.py | 9 +- plugins/action/zos_fetch.py | 9 +- tests/conftest.py | 13 +- .../functional/modules/test_zos_copy_func.py | 155 ++++++++++++++---- tests/helpers/ztest.py | 4 + 7 files changed, 160 insertions(+), 41 deletions(-) create mode 100644 changelogs/fragments/791-zos_data_set-update-vsam-copy.yml rename changelogs/fragments/{791-zos_data_set-update-vsam.yml => 806-zos_copy_fetch-display-verbose.yml} (100%) diff --git a/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml b/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml new file mode 100644 index 000000000..3d29e906e --- /dev/null +++ b/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml @@ -0,0 +1,11 @@ +bugfixes: +- zos_copy - Test case for recursive encoding directories reported a + UTF-8 failure. This change ensures proper test coverage for nested + directories and file permissions. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). 
\ No newline at end of file diff --git a/changelogs/fragments/791-zos_data_set-update-vsam.yml b/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml similarity index 100% rename from changelogs/fragments/791-zos_data_set-update-vsam.yml rename to changelogs/fragments/806-zos_copy_fetch-display-verbose.yml diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 5fa861b61..b98432aa6 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -258,9 +258,10 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): display.vvv(u"ibm_zos_copy return code: {0}".format(returncode), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_copy stdout: {0}".format(stdout), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_copy stderr: {0}".format(stderr), host=self._play_context.remote_addr) - display.vvv(u"play context verbosity: {0}".format(self._play_context.verbosity), host=self._play_context.remote_addr) - err = _detect_sftp_errors(stderr) + ansible_verbosity = None + ansible_verbosity = display.verbosity + display.vvv(u"play context verbosity: {0}".format(ansible_verbosity), host=self._play_context.remote_addr) # ************************************************************************* # # When plugin shh connection member _build_command(..) detects verbosity # @@ -275,7 +276,9 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): # the verbosity is returned as 'stderr'. 
# # ************************************************************************* # - if self._play_context.verbosity > 3: + err = _detect_sftp_errors(stderr) + + if ansible_verbosity > 3: ignore_stderr = True if returncode != 0 or (err and not ignore_stderr): diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 67bd83981..e10dbd75f 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -343,9 +343,10 @@ def _transfer_remote_content( display.vvv(u"ibm_zos_fetch return code: {0}".format(returncode), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_fetch stdout: {0}".format(stdout), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_fetch stderr: {0}".format(stderr), host=self._play_context.remote_addr) - display.vvv(u"play context verbosity: {0}".format(self._play_context.verbosity), host=self._play_context.remote_addr) - err = _detect_sftp_errors(stderr) + ansible_verbosity = None + ansible_verbosity = display.verbosity + display.vvv(u"play context verbosity: {0}".format(ansible_verbosity), host=self._play_context.remote_addr) # ************************************************************************* # # When plugin shh connection member _build_command(..) detects verbosity # @@ -360,7 +361,9 @@ def _transfer_remote_content( # the verbosity is returned as 'stderr'. 
# # ************************************************************************* # - if self._play_context.verbosity > 3: + err = _detect_sftp_errors(stderr) + + if ansible_verbosity > 3: ignore_stderr = True if re.findall(r"Permission denied", err): diff --git a/tests/conftest.py b/tests/conftest.py index 980edb332..506214f29 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,7 +38,8 @@ def z_python_interpreter(request): helper = ZTestHelper.from_yaml_file(path) interpreter_str = helper.build_interpreter_string() inventory = helper.get_inventory_info() - yield (interpreter_str, inventory) + python_path = helper.get_python_path() + yield (interpreter_str, inventory, python_path) def clean_logs(adhoc): @@ -62,12 +63,18 @@ def clean_logs(adhoc): def ansible_zos_module(request, z_python_interpreter): """ Initialize pytest-ansible plugin with values from our YAML config and inject interpreter path into inventory. """ - interpreter, inventory = z_python_interpreter + interpreter, inventory, python_path = z_python_interpreter + # next two lines perform similar action to ansible_adhoc fixture plugin = request.config.pluginmanager.getplugin("ansible") adhoc = plugin.initialize(request.config, request, **inventory) - # * Inject our environment + + # Inject our environment hosts = adhoc["options"]["inventory_manager"]._inventory.hosts + + # Courtesy, pass along the python_path for some test cases need this information + adhoc["options"]["ansible_python_path"] = python_path + for host in hosts.values(): host.vars["ansible_python_interpreter"] = interpreter # host.vars["ansible_connection"] = "zos_ssh" diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 41efc4ee4..7b69a75b4 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -20,6 +20,7 @@ import time import tempfile from tempfile import mkstemp +import subprocess __metaclass__ = type @@ 
-471,45 +472,99 @@ def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): @pytest.mark.uss def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/test/" - text_outer_file = "Hi I am point A" - text_inner_file = "Hi I am point B" - src_path = "/tmp/level_1/" - outer_file = "/tmp/level_1/text_A.txt" - inner_src_path = "/tmp/level_1/level_2/" - inner_file = "/tmp/level_1/level_2/text_B.txt" - - try: - hosts.all.file(path=inner_src_path, state="directory") - hosts.all.file(path=inner_file, state = "touch") - hosts.all.file(path=outer_file, state = "touch") - hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_outer_file, outer_file)) - hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_inner_file, inner_file)) - - copy_res = hosts.all.zos_copy(src=src_path, dest=dest_path, encoding={"from": "ISO8859-1", "to": "IBM-1047"}, remote_src=True) + + # Remote path + path = "/tmp/ansible" + + # Remote src path with original files + src_path = path + "/src" + + # Nested src dirs + src_dir_one = src_path + "/dir_one" + src_dir_two = src_dir_one + "/dir_two" + src_dir_three = src_dir_two + "/dir_three" + + # Nested src IBM-1047 files + src_file_one = src_path + "/dir_one/one.txt" + src_file_two = src_dir_one + "/dir_two/two.txt" + src_file_three = src_dir_two + "/dir_three/three.txt" + + # Remote dest path to encoded files placed + dest_path = path + "/dest" + + # Nested dest UTF-8 files + dst_file_one = dest_path + "/dir_one/one.txt" + dst_file_two = dest_path + "/dir_one/dir_two/two.txt" + dst_file_three = dest_path + "/dir_one/dir_two//dir_three/three.txt" + + # Strings echo'd to files on USS + str_one = "This is file one." + str_two = "This is file two." + str_three = "This is file three." 
+ + # Hex values for expected results, expected used beause pytest-ansible does not allow for delegate_to + # and depending on where the `od` runs, you may face big/little endian issues, so using expected utf-8 + str_one_big_endian_hex="""0000000000 5468 6973 2069 7320 6669 6C65 206F 6E65 +0000000020 2E0A +0000000022""" + + str_two_big_endian_hex="""0000000000 5468 6973 2069 7320 6669 6C65 2074 776F +0000000020 2E0A +0000000022""" + + str_three_big_endian_hex="""0000000000 5468 6973 2069 7320 6669 6C65 2074 6872 +0000000020 6565 2E0A +0000000024""" + + try: + # Ensure clean slate + results = hosts.all.file(path=path, state="absent") + + # Create nested directories + hosts.all.file(path=src_dir_three, state="directory", mode="0755") + + # Touch empty files + hosts.all.file(path=src_file_one, state = "touch") + hosts.all.file(path=src_file_two, state = "touch") + hosts.all.file(path=src_file_three, state = "touch") + + # Echo contents into files (could use zos_lineinfile or zos_copy), echo'ing will + # result in managed node's locale which currently is IBM-1047 + hosts.all.raw("echo '{0}' > '{1}'".format(str_one, src_file_one)) + hosts.all.raw("echo '{0}' > '{1}'".format(str_two, src_file_two)) + hosts.all.raw("echo '{0}' > '{1}'".format(str_three, src_file_three)) + + # Lets stat the deepest nested directory, not necessary to stat all of them + results = hosts.all.stat(path=src_file_three) + for result in results.contacted.values(): + assert result.get("stat").get("exists") is True + + # Nested zos_copy from IBM-1047 to UTF-8 + # Testing src ending in / such that the contents of the src directory will be copied + copy_res = hosts.all.zos_copy(src=src_path+"/", dest=dest_path, encoding={"from": "IBM-1047", "to": "UTF-8"}, remote_src=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - stat_res = hosts.all.stat(path="/tmp/test/level_2/") - for st in stat_res.contacted.values(): - assert 
st.get("stat").get("exists") is True + # File z/OS dest is now UTF-8, dump the hex value and compare it to an + # expected big-endian version, can't run delegate_to local host so expected + # value is the work around for now. + str_one_od_dst = hosts.all.shell(cmd="od -x {0}".format(dst_file_one)) + str_two_od_dst = hosts.all.shell(cmd="od -x {0}".format(dst_file_two)) + str_three_od_dst = hosts.all.shell(cmd="od -x {0}".format(dst_file_three)) + + for result in str_one_od_dst.contacted.values(): + assert result.get("stdout") == str_one_big_endian_hex + + for result in str_two_od_dst.contacted.values(): + assert result.get("stdout") == str_two_big_endian_hex - full_inner_path = dest_path + "/level_2/text_B.txt" - full_outer_path = dest_path + "/text_A.txt" - inner_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_inner_path)) - outer_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_outer_path)) - for text in outer_file_text_aft_encoding.contacted.values(): - text_outer = text.get("stdout") - for text in inner_file_text_aft_encoding.contacted.values(): - text_inner = text.get("stdout") + for result in str_three_od_dst.contacted.values(): + assert result.get("stdout") == str_three_big_endian_hex - assert text_inner == text_inner_file - assert text_outer == text_outer_file finally: - hosts.all.file(path=src_path, state="absent") - hosts.all.file(path=dest_path, state="absent") + hosts.all.file(path=path, state="absent") @pytest.mark.uss @@ -2849,4 +2904,40 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( assert v_cp.get("rc") == 0 finally: hosts.all.zos_data_set(name=dest, state="absent") - \ No newline at end of file + + +@pytest.mark.parametrize("options", [ + dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", + force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), + dict(src="/etc/profile", dest="/mp/zos_copy_test_profile", force=True, + is_remote=False, verbosity="-vvvv", 
verbosity_level=4), + dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", + force=True, is_remote=False, verbosity="", verbosity_level=0), +]) +def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): + """Test the display verbosity, ensure it matches the verbosity_level. + This test requires access to verbosity and pytest-ansbile provides no + reasonable handle for this so for now subprocess is used. This test + results in no actual copy happening, the interest is in the verbosity""" + + try: + hosts = ansible_zos_module + user = hosts["options"]["user"] + # Optionally hosts["options"]["inventory_manager"].list_hosts()[0] + node = hosts["options"]["inventory"].rstrip(',') + python_path = hosts["options"]["ansible_python_path"] + + # This is an adhoc command, because there was no + cmd = "ansible all -i " + str(node) + ", -u " + user + " -m ibm.ibm_zos_core.zos_copy -a \"src=" + options["src"] + " dest=" + options["dest"] + " is_remote=" + str( + options["is_remote"]) + " encoding={{enc}} \" -e '{\"enc\":{\"from\": \"ISO8859-1\", \"to\": \"IBM-1047\"}}' -e \"ansible_python_interpreter=" + python_path + "\" " + options["verbosity"] + "" + + result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout + output = result.read().decode() + + if options["verbosity_level"] != 0: + assert ("play context verbosity: "+ str(options["verbosity_level"])+"" in output) + else: + assert ("play context verbosity:" not in output) + + finally: + hosts.all.file(path=options["dest"], state="absent") diff --git a/tests/helpers/ztest.py b/tests/helpers/ztest.py index 4115da106..af198d6f0 100644 --- a/tests/helpers/ztest.py +++ b/tests/helpers/ztest.py @@ -64,3 +64,7 @@ def build_interpreter_string(self): interpreter_string += "export {0}={1} ; ".format(key, value) interpreter_string += self._python_path return interpreter_string + + def get_python_path(self): + """ Returns python path """ + return self._python_path From 
f3eae2c45ad465d472f1301b258de6339c18765d Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 6 Jul 2023 08:47:49 -0700 Subject: [PATCH 137/413] Stagging v1.6.0 merge into dev (#832) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. 
There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- 
Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing 
fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- 
Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: 
ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. 
--------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant 
pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: 
Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * update zoau version checker and add unit testing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Bugfix/769/1.6/zos copy does not overwrite permission on overwriten copy as comunity module (#790) * Change function behaivour, variables names and add test case to ensure result * Change test assertion to ensure wokrs as ansible module * Change test for the HSF * Add fragment * More readable test and better coments * add changelog fragment for zoau version checker bugfix (#800) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Add ansible-lint tooling added (#813) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: ac galaxy.yml meta/runtime.yml Changes to be committed: new file: .ansible-lint new file: ac modified: galaxy.yml modified: meta/runtime.yml * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Removing file brought it in by cherry pick Signed-off-by: ddimatos <dimatos@gmail.com> * Added missing keyword 'build_ignore' Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix for #807 - zos_copy ignores encoding for binary files (#810) * Updated normalization condition * Added test cases for bugfix * Added changelog fragment * Updated changelog fragment * Update zos_data_set member description created (#816) * Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> 
* Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> * Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update grammar issue Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#814) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: deleted: tests/functional/modules/test_module_display.py modified: tests/functional/modules/test_zos_copy_func.py * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: modified: tests/functional/modules/test_zos_copy_func.py * Change from shell to raw 
module usage Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: modified: tests/functional/modules/test_zos_copy_func.py * changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment change Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Release tasks v1.6.0 (#829) * Galaxy for 1.6 Signed-off-by: ddimatos <dimatos@gmail.com> * Update ansible-core meta runtime Signed-off-by: ddimatos <dimatos@gmail.com> * Update collections private meta Signed-off-by: ddimatos <dimatos@gmail.com> * Missing trailing empty line Signed-off-by: ddimatos <dimatos@gmail.com> * Update readme with volume init info Signed-off-by: ddimatos <dimatos@gmail.com> * Update lint to match galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog summary Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog Signed-off-by: ddimatos <dimatos@gmail.com> * Minor manual CHANGELOG updates Signed-off-by: ddimatos <dimatos@gmail.com> * Relase notes updated Signed-off-by: ddimatos <dimatos@gmail.com> * Update build_ignore to skip importer_result.json Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up changelog fragements no longer need post releasing 1.6 Signed-off-by: ddimatos <dimatos@gmail.com> * delete older profile, not needed Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez 
<68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- .ansible-lint | 2 - CHANGELOG.rst | 21 ++-- README.md | 5 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 38 ++++++ changelogs/fragments/.gitkeep | 0 docs/source/modules/zos_data_set.rst | 2 +- galaxy.yml | 5 +- meta/ibm_zos_core_meta.yml | 4 +- meta/runtime.yml | 2 +- plugins/module_utils/job.py | 1 - plugins/module_utils/zoau_version_checker.py | 6 +- plugins/modules/zos_copy.py | 51 +++++++- plugins/modules/zos_data_set.py | 2 +- scripts/mounts.sh | 0 .../functional/modules/test_zos_copy_func.py | 78 +++++++++++++ tests/unit/test_zoau_version_checker_unit.py | 109 ++++++++++++++++++ 17 files changed, 303 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/.gitkeep mode change 100755 => 100644 scripts/mounts.sh create mode 100644 tests/unit/test_zoau_version_checker_unit.py diff --git a/.ansible-lint b/.ansible-lint index a83129368..ac0ca0b26 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -25,9 +25,7 @@ exclude_paths: - tests/helpers - tests/requirements.txt - tests/unit - - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt - - tests/sanity/ignore-2.11.txt - venv* parseable: true quiet: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c19a39bbc..7cf358b23 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,13 +5,13 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.6.0-beta.1 -============= +v1.6.0 +====== Release Summary --------------- -Release Date: '2023-04-26' +Release Date: '2023-06-23' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. 
For additional details such as required dependencies and availability review @@ -26,11 +26,11 @@ Minor Changes ------------- - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning".. warn Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) -- module_utils - job.py utility did not support positional wiled card placement, this enhancement uses `fnmatch` logic to support wild cards. +- module_utils - job.py utility did not support positional wild card placement, this enhancement uses `fnmatch` logic to support wild cards. - zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) - zos_copy - was enhanced to keep track of modified members in a destination dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) -- zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). -- zos_job_query - ansible module does not support positional wild card placement for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_data_set - add force parameter to enable member delete while PDS/e is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). +- zos_job_query - ansible module does not support positional wild card placement for `job_name` or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/721) - zos_lineinfile - would access data sets with exclusive access so no other task can read the data, this enhancement allows for a data set to be opened with a disposition set to share so that other tasks can access the data when option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) - zos_tso_command - was enhanced to accept `max_rc` as an option. This option allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) @@ -38,11 +38,18 @@ Bugfixes -------- - Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". -- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an approriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). +- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an appropriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- zos_copy - Encoding normalization used to handle newlines in text files was applied to binary files too. Fix makes sure that binary files bypass this normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. 
(https://github.com/ansible-collections/ibm_zos_core/pull/743) - zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). +- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/790) +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/816). - zos_encode - fixes a bug where converted files were not tagged afterwards with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). - zos_find - fixes a bug where find result values stopped being returned after first value in a list was 'not found'. (https://github.com/ansible-collections/ibm_zos_core/pull/668) +- zos_gather_facts - Fixes an issue in the zoau version checker which prevented the zos_gather_facts module from running with newer versions of ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/797) - zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed to ensure support for Python 2.7 on the controller. 
(https://github.com/ansible-collections/ibm_zos_core/pull/659) New Modules diff --git a/README.md b/README.md index 756f06d92..5d48210a9 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,8 @@ executing operator commands, executing TSO commands, ping, querying operator actions, APF authorizing libraries, editing textual data in data sets or Unix System Services files, finding data sets, backing up and restoring data sets and -volumes, mounting file systems and running z/OS programs without JCL. +volumes, mounting file systems, running z/OS programs without JCL and +initializing volumes. Red Hat Ansible Certified Content for IBM Z @@ -49,7 +50,7 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.15. +This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.16. The Ansible and Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). 
Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 2c3c67c65..fbc11cf4b 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -116,4 +116,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.6.0-beta.1 +version: 1.6.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 51bba3c4f..37049f8df 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -760,6 +760,44 @@ releases: name: zos_gather_facts namespace: '' release_date: '2022-11-02' + 1.6.0: + changes: + bugfixes: + - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM + data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). + - zos_copy - Encoding normalization used to handle newlines in text files was + applied to binary files too. Fix makes sure that binary files bypass this + normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) + - zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). + - zos_copy - kept permissions on target directory when copy overwrote files. + The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/790) + - zos_data_set - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical flow + that will evaluate the volumes, compare it to the provided value and if necessary + catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/816). 
+ - zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). + - zos_gather_facts - Fixes an issue in the zoau version checker which prevented + the zos_gather_facts module from running with newer versions of ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/797) + release_summary: 'Release Date: ''2023-06-23'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 783_fix_zoau_version_checker.yml + - 790_overwrite_permissions_on_copy.yml + - 810_fix_binary_file_bypass.yml + - 813-ansible-lint.yml + - 814-zos_data_set-update-vsam-copy.yml + - 816-zos_data_set-update-vsam.yml + - v1.6.0_summary.yml + release_date: '2023-06-23' 1.6.0-beta.1: changes: bugfixes: diff --git a/changelogs/fragments/.gitkeep b/changelogs/fragments/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 8415694b0..9e051bece 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -56,7 +56,7 @@ state If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. - If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. 
If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. diff --git a/galaxy.yml b/galaxy.yml index fad6b8f5c..87f10f272 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.6.0-beta.1 +version: 1.7.0-beta.1 # Collection README file readme: README.md @@ -80,6 +80,7 @@ build_ignore: - changelogs - collections - docs + - importer_result.json - scripts - test_config.yml - tests/*.ini @@ -90,7 +91,5 @@ build_ignore: - tests/helpers - tests/requirements.txt - tests/unit - - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt - - tests/sanity/ignore-2.11.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 484ad69fd..5e265309f 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.6.0-beta.1" +version: "1.7.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.2.2" + - "1.2.3" diff --git 
a/meta/runtime.yml b/meta/runtime.yml index 9f9843979..576832bc7 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.12.00' +requires_ansible: '>=2.9.0' diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d987d5a52..d07ef5ac5 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -240,7 +240,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"] = {} job["ret_code"]["msg"] = entry.status + " " + entry.rc job["ret_code"]["msg_code"] = entry.rc - job["ret_code"]["code"] = None if len(entry.rc) > 0: if entry.rc.isdigit(): diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 74494e04a..a5fff7196 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -27,8 +27,10 @@ def is_zoau_version_higher_than(min_version_str): bool -- Whether ZOAU version found was high enough. """ if is_valid_version_string(min_version_str): - # check zoau version on system + # check zoau version on system (already a list) system_version_list = get_zoau_version_str() + + # convert input to list format min_version_list = min_version_str.split('.') # convert list of strs to list of ints @@ -103,7 +105,7 @@ def get_zoau_version_str(): .stdout .decode('UTF-8') .strip() - .split(' ')[-1][1:] + .split(' ')[3][1:] .split('.') ) return version_list diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index fd7c8feac..ed8a691cc 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1256,7 +1256,7 @@ def _get_changed_files(self, src, dest, copy_directory): files_to_change.append(relative_path) # This change adds to the files_to_change variable any file that accord with - # a name found in the source copy. 
+ # a name found in the source copy files_to_change.extend(existing_files) # Creating tuples with (filename, permissions). original_permissions = [ @@ -2245,6 +2245,53 @@ def normalize_line_endings(src, encoding=None): return src +def normalize_line_endings(src, encoding=None): + """ + Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes + its line endings to LF. + + Arguments: + src (str) -- Path of a USS file. + encoding (dict, optional) -- Encoding options for the module. + + Returns: + str -- Path to the normalized file. + """ + # Before copying into a destination dataset, we'll make sure that + # the source file doesn't contain any carriage returns that would + # result in empty records in the destination. + # Due to the differences between encodings, we'll normalize to IBM-037 + # before checking the EOL sequence. + enc_utils = encode.EncodeUtils() + src_tag = enc_utils.uss_file_tag(src) + copy_handler = CopyHandler(AnsibleModuleHelper(dict())) + + if src_tag == "untagged": + # This should only be true when src is a remote file and no encoding + # was specified by the user. + if not encoding: + encoding = {"from": encode.Defaults.get_default_system_charset()} + src_tag = encoding["from"] + + if src_tag != "IBM-037": + fd, converted_src = tempfile.mkstemp() + os.close(fd) + + enc_utils.uss_convert_encoding( + src, + converted_src, + src_tag, + "IBM-037" + ) + copy_handler._tag_file_encoding(converted_src, "IBM-037") + src = converted_src + + if copy_handler.file_has_crlf_endings(src): + src = copy_handler.create_temp_with_lf_endings(src) + + return src + + def run_module(module, arg_def): # ******************************************************************** # Verify the validity of module args. 
BetterArgParser raises ValueError @@ -2339,7 +2386,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. - if not is_uss: + if not is_binary and not is_uss: new_src = temp_path or src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index ca6012978..73af4acf1 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -60,7 +60,7 @@ - > If I(state=absent) and I(volumes) is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided - I(volumes). If they volume attributes are different, the cataloged data set + I(volumes). If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with I(changed=False). diff --git a/scripts/mounts.sh b/scripts/mounts.sh old mode 100755 new mode 100644 diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 7b69a75b4..345e6067d 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -44,6 +44,11 @@ DUMMY_DATA_CRLF = b"00000001 DUMMY DATA\r\n00000002 DUMMY DATA\r\n" +# FD is outside of the range of UTF-8, so it should be useful when testing +# that binary data is not getting converted. 
+DUMMY_DATA_BINARY = b"\xFD\xFD\xFD\xFD" +DUMMY_DATA_BINARY_ESCAPED = "\\xFD\\xFD\\xFD\\xFD" + VSAM_RECORDS = """00000001A record 00000002A record 00000003A record @@ -1300,6 +1305,79 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): os.remove(src) +# The following two tests are to address the bugfix for issue #807. +@pytest.mark.uss +@pytest.mark.seq +def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): + hosts = ansible_zos_module + dest = "USER.TEST.SEQ.FUNCTEST" + + fd, src = tempfile.mkstemp() + os.close(fd) + with open(src, "wb") as infile: + infile.write(DUMMY_DATA_BINARY) + + try: + hosts.all.zos_data_set(name=dest, state="absent") + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=False, + is_binary=True + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + finally: + hosts.all.zos_data_set(name=dest, state="absent") + os.remove(src) + + +@pytest.mark.uss +@pytest.mark.seq +def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module): + hosts = ansible_zos_module + src = "/tmp/zos_copy_binary_file" + dest = "USER.TEST.SEQ.FUNCTEST" + + try: + hosts.all.zos_data_set(name=dest, state="absent") + + # Creating a binary file on the remote system through Python + # to avoid encoding issues if we were to copy a local file + # or use the shell directly. + python_cmd = """python3 -c 'with open("{0}", "wb") as f: f.write(b"{1}")'""".format( + src, + DUMMY_DATA_BINARY_ESCAPED + ) + python_result = hosts.all.shell(python_cmd) + for result in python_result.contacted.values(): + assert result.get("msg") is None or result.get("msg") == "" + assert result.get("stderr") is None or result.get("stderr") == "" + + # Because the original bug report used a file tagged as 'binary' + # on z/OS, we'll recreate that use case here. 
+ hosts.all.shell("chtag -b {0}".format(src)) + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=True, + is_binary=True + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + finally: + hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.file(path=src, state="absent") + + @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ diff --git a/tests/unit/test_zoau_version_checker_unit.py b/tests/unit/test_zoau_version_checker_unit.py new file mode 100644 index 000000000..96031f4a1 --- /dev/null +++ b/tests/unit/test_zoau_version_checker_unit.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ibm_zos_core.plugins.module_utils.zoau_version_checker import ( + get_zoau_version_str, + is_valid_version_string, + is_zoau_version_higher_than +) + +import pytest, mock +import types + +# used in patch decorators below +IMPORT_NAME = "ibm_zos_core.plugins.module_utils.zoau_version_checker" + +# Tests for zoau_version_checker + +zoaversion_output = [ + + (['1','0','2'], "2020/03/03 19:24:41 CUT V1.0.2"), + (['1','0','3'], "2020/05/06 18:17:13 CUT V1.0.3"), + (['1','0','3'], "2020/07/07 14:54:31 CUT V1.0.3"), + (['1','1','0'], "2020/08/05 13:08:52 CUT V1.1.0"), + (['1','1','0'], "2020/08/20 12:50:07 CUT V1.1.0"), + (['1','1','0'], "2020/09/16 13:41:25 CUT V1.1.0"), + (['1','1','0'], "2020/09/25 14:07:34 CUT V1.1.0"), + (['1','1','1'], "2021/03/26 15:44:32 CUT V1.1.1"), + (['1','2','0'], "2021/07/07 22:36:30 CUT V1.2.0"), + (['1','2','0'], "2021/08/05 22:12:58 CUT V1.2.0"), + (['1','2','1'], "2022/07/12 18:35:28 CUT V1.2.1"), + (['1','2','1'], "2022/08/17 21:25:13 CUT V1.2.1"), + (['1','2','1'], "2022/08/25 21:44:21 CUT V1.2.1 31163ab 1856"), + (['1','2','1'], "2022/09/07 15:26:50 CUT V1.2.1 d2f6557 1880"), + (['1','2','3'], "2022/12/03 13:33:22 CUT V1.2.3 6113dc9 2512"), + (['1','2','2'], "2022/12/06 20:44:00 CUT V1.2.2 ee30137 2525"), + (['1','2','3'], "2023/03/16 18:17:00 CUT V1.2.3 1aa591fb 2148 PH50145"), + (['1', '2', '4', '0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + +] + + +@pytest.mark.parametrize("version_string, zoaversion", zoaversion_output) +@mock.patch('subprocess.run') +def test_get_zoau_version_str(mocker, version_string, zoaversion): + # 'get_zoau_version_str' makes a call to 'zoaversion' on the target host by + # calling 'subprocess.run', which returns an object with an attr 'stdout' + # that contains the byte string of the console output. 
The following mocks + # this behavior so the code can be tested without making a call to a host. + # Instead, zoaversion output for various versions of ZOAU are stored in the + # list of tuples 'zoaversion_output' above and returned by the mocked call + # to 'subprocess.run' after being converted to bytes. SimpleNamespace is an + # object subclass which allows for attributes to be set/removed. In our + # case, 'get_zoau_version_str' expects a 'stdout' attribute in the return + # struct of 'subprocess.run', which we mock via SimpleNamespace. + + mocker.return_value = types.SimpleNamespace( + stdout = bytes(zoaversion, 'utf-8'), + ) + + assert version_string == get_zoau_version_str() + + +@pytest.mark.parametrize("version_string, zoaversion", zoaversion_output) +def test_is_valid_version_string(version_string,zoaversion): + # The first parameter in our zoaversion_output list of tuples above is the + # return value of the function 'get_zoau_version_str' in the form of + # ['#','#','#'] or ['#','#','#','#']. A 'join' str operation with a dot(.) + # yields "#.#.#" or "#.#.#.#". And since these values are taken from this + # list, they can all be expected to be valid ZOAU version strings. + + assert True == is_valid_version_string('.'.join(version_string)) + + +test_data = [ +# result, "system-level" ZOAU >= min-ZOAU + (True, ['1', '2', '1'], "1.2.1"), + (True, ['1', '2', '3'], "1.2.1"), + (True, ['1', '2', '4', '0'], "1.2.2"), + + (False, ['1', '1', '1'], "1.2.3"), + (False, ['1', '1', '1'], "1.2.4.0"), + (False, ['1', '2', '0', '1'], "1.2.1"), +] + + +@pytest.mark.parametrize("expected, sys_zoau, min_version_str", test_data) +@mock.patch(IMPORT_NAME+'.get_zoau_version_str') +def test_is_zoau_version_higher_than(mocker, expected, sys_zoau, min_version_str): + # The 'is_zoau_version_higher_than' function calls 'get_zoau_version_str' to + # get the ZOAU version string from the system. 
We mock that call and provide + # our own "system" level ZOAU version str to compare against our provided + # minimum ZOAU version string. + + mocker.return_value = sys_zoau + assert expected == is_zoau_version_higher_than(min_version_str) From 44c578e686ffe9e7ace7fdc8bfa3efbf48f38fd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 7 Jul 2023 13:43:17 -0600 Subject: [PATCH 138/413] Bug fix, zos_copy returns an error message when a concurrent copy fails (#794) * Test case to validate bug does not happen * First iteration for solutions * First proposal to validate_disposition * Remove unecesary test * Solvin unecesary code * Cover all cases with bug or false positive * Add test case to ensure behaviour * Get the better version of test case * Add fragment * Solve identation * Solve identation * Solve identation * Solve error in cleanup folders * Change function name * Change variables names * Solve wrote and write * Update changelog entry Signed-off-by: ddimatos <dimatos@gmail.com> * Better verbose and function name * Better message * Solve certification tests * Clearer and eficient version * continuation line over-indented solve * continuation line over-indented solve --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- ...or_message_when_concurrent_copy_fails.yaml | 6 ++ plugins/modules/zos_copy.py | 43 ++++++++++- .../functional/modules/test_zos_copy_func.py | 75 +++++++++++++++++++ 3 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml diff --git a/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml b/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml new file mode 100644 index 000000000..dd5b71220 --- /dev/null +++ 
b/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml @@ -0,0 +1,6 @@ +bugfixes: + - zos_copy - Reported a false positive such that the response would have + `changed=true` when copying from a source (src) or destination (dest) + data set that was in use (DISP=SHR). This change now displays an appropriate + error message and returns `changed=false`. + (https://github.com/ansible-collections/ibm_zos_core/pull/794). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ed8a691cc..6a3be27b8 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -737,7 +737,7 @@ from re import match as fullmatch try: - from zoautil_py import datasets + from zoautil_py import datasets, opercmd except Exception: datasets = MissingZOAUImport() @@ -2245,6 +2245,37 @@ def normalize_line_endings(src, encoding=None): return src +def data_set_locked(dataset_name): + """ + Checks if a data set is in use and therefore locked (DISP=SHR), which + is often caused by a long running task. Returns a boolean value to indicate the data set status. + + Arguments: + dataset_name (str) - the data set name used to check if there is a lock. + + Returns: + bool -- True if the data set is locked, or False if the data set is not locked. + """ + # Using operator command "D GRS,RES=(*,{dataset_name})" to detect if a data set + # is in use, when a data set is in use it will have "EXC/SHR and SHARE" + # in the result with a length greater than 4. 
+ result = dict() + result["stdout"] = [] + command_dgrs = "D GRS,RES=(*,{0})".format(dataset_name) + response = opercmd.execute(command=command_dgrs) + stdout = response.stdout_response + if stdout is not None: + for out in stdout.split("\n"): + if out: + result["stdout"].append(out) + if len(result["stdout"]) > 4 and "EXC/SHR" in stdout and "SHARE" in stdout: + return True + elif len(result["stdout"]) <= 4 and "NO REQUESTORS FOR RESOURCE" in stdout: + return False + else: + return False + + def normalize_line_endings(src, encoding=None): """ Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes @@ -2501,6 +2532,16 @@ def run_module(module, arg_def): ) # ******************************************************************** + # To validate that the source and destination are not locked by another + # batch process (and to avoid a false positive), check the disposition + # of dest before trying to write, and whether src and dest are locked. + # ******************************************************************** + if dest_ds_type != "USS": + is_dest_lock = data_set_locked(dest_name) + if is_dest_lock: + module.fail_json( + msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + # ******************************************************************** # Backup should only be performed if dest is an existing file or # data set. Otherwise ignored. 
# ******************************************************************** diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 345e6067d..db6262bc9 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -22,6 +22,7 @@ from tempfile import mkstemp import subprocess + __metaclass__ = type @@ -117,6 +118,30 @@ """ +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + def populate_dir(dir_path): for i in range(5): with open(dir_path + "/" + "file" + str(i + 1), "w") as infile: @@ -1198,6 +1223,54 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, hosts.all.file(path=dest_path, state="absent") +@pytest.mark.seq +def test_copy_dest_lock(ansible_zos_module): + DATASET_1 = "USER.PRIVATE.TESTDS" + DATASET_2 = "ADMI.PRIVATE.TESTDS" + MEMBER_1 = "MEM1" + try: + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DATASET_1, state="present", type="pdse", replace=True) + hosts.all.zos_data_set(name=DATASET_2, state="present", type="pdse", replace=True) + hosts.all.zos_data_set(name=DATASET_1 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) + hosts.all.zos_data_set(name=DATASET_2 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) + # copy text_in source + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(DUMMY_DATA, DATASET_2+"({0})".format(MEMBER_1))) + # copy/compile c program and copy jcl to hold data set 
lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DATASET_1, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + # pause to ensure c code acquires lock + time.sleep(5) + results = hosts.all.zos_copy( + src = DATASET_2 + "({0})".format(MEMBER_1), + dest = DATASET_1 + "({0})".format(MEMBER_1), + remote_src = True, + force = True + ) + for result in results.contacted.values(): + print(result) + assert result.get("changed") == False + assert result.get("msg") is not None + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DATASET_1, state="absent") + hosts.all.zos_data_set(name=DATASET_2, state="absent") + + @pytest.mark.uss @pytest.mark.seq def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): @@ -2984,6 +3057,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( hosts.all.zos_data_set(name=dest, state="absent") + @pytest.mark.parametrize("options", [ dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), @@ -3019,3 +3093,4 @@ def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): finally: hosts.all.file(path=options["dest"], state="absent") + From bfd30fc05c922c8fac6ec28eaef58b6d0d595670 Mon Sep 17 00:00:00 2001 From: Ivan Moreno 
<iamorenosoto@gmail.com> Date: Fri, 7 Jul 2023 13:03:48 -0700 Subject: [PATCH 139/413] Enhancement/428/jinja2 template support (#667) * Added Jinja2 support for single files in zos_copy * Fixed cleanup of rendered templates * Added templating for directories * Fixed encoding conversion for directories (#616) While working on adding Jinja2 support for directories, I noticed that files that were in subdirectories would show garbage in a shell, which reminded me of issue #616. After implementing their suggestion, files now show the rendered templates. * Disabled rendering for remote sources * Enhanced exception handling for templates * Added encoding normalization for templates * Added templating tests to zos_copy * Added templating support to zos_job_submit * Fixed missing newline * Added failure when using templates with remote sources * Fixed encoding normalization * Fixed sanity tests issues * Added Jinja2 tests to zos_job_submit * Updated template test for zos_job_submit * Fixed template environment creation * Refactored template environment creation * Fixed initial size computation for local files * Fixed merge mistakes * Updated description for trim_blocks * Updated docs * Added test for Jinja2 loops * Added changelog fragment * Removed duplicated function * Changed try-except blocks used in action plugins * Updated template docs with variable precedence * Changed dictionary update in action plugin * Added another encoding for template tests * Fixed import errors in template.py * Fixed import error in Python 2.7 * Fixed bandit issue * Fixed template cleanup * Updated documentation --------- Co-authored-by: Demetri <dimatos@gmail.com> --- changelogs/fragments/667-template-support.yml | 4 + docs/source/modules/zos_copy.rst | 200 +++++++++++ docs/source/modules/zos_job_submit.rst | 127 +++++++ plugins/action/zos_copy.py | 67 +++- plugins/action/zos_job_submit.py | 49 +++ plugins/doc_fragments/template.py | 120 +++++++ plugins/module_utils/template.py | 336 
++++++++++++++++++ plugins/modules/zos_copy.py | 52 ++- plugins/modules/zos_job_submit.py | 28 ++ .../functional/modules/test_zos_copy_func.py | 300 ++++++++++++++++ .../modules/test_zos_job_submit_func.py | 112 +++++- tests/pytest.ini | 3 +- 12 files changed, 1388 insertions(+), 10 deletions(-) create mode 100644 changelogs/fragments/667-template-support.yml create mode 100644 plugins/doc_fragments/template.py create mode 100644 plugins/module_utils/template.py diff --git a/changelogs/fragments/667-template-support.yml b/changelogs/fragments/667-template-support.yml new file mode 100644 index 000000000..2ac499a3d --- /dev/null +++ b/changelogs/fragments/667-template-support.yml @@ -0,0 +1,4 @@ +minor_changes: + - Add support for Jinja2 templates in zos_copy and zos_job_submit + when using local source files. + (https://github.com/ansible-collections/ibm_zos_core/pull/667) \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 89be20ebb..45dee10a7 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -375,6 +375,133 @@ dest_data_set +use_template + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + + Only valid when ``src`` is a local file or directory. + + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. 
You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + + | **required**: False + | **type**: bool + + +template_parameters + Options to set the way Jinja2 will process templates. + + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + + These options are ignored unless ``use_template`` is true. + + | **required**: False + | **type**: dict + + + variable_start_string + Marker for the beginning of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: {{ + + + variable_end_string + Marker for the end of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: }} + + + block_start_string + Marker for the beginning of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: {% + + + block_end_string + Marker for the end of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: %} + + + comment_start_string + Marker for the beginning of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: {# + + + comment_end_string + Marker for the end of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: #} + + + line_statement_prefix + Prefix used by Jinja2 to identify line-based statements. + + | **required**: False + | **type**: str + + + line_comment_prefix + Prefix used by Jinja2 to identify comment lines. + + | **required**: False + | **type**: str + + + lstrip_blocks + Whether Jinja2 should strip leading spaces from the start of a line to a block. + + | **required**: False + | **type**: bool + + + trim_blocks + Whether Jinja2 should remove the first newline after a block is removed. 
+ + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + + | **required**: False + | **type**: bool + | **default**: True + + + keep_trailing_newline + Whether Jinja2 should keep the first trailing newline at the end of a template after rendering. + + | **required**: False + | **type**: bool + + + newline_sequence + Sequence that starts a newline in a template. + + | **required**: False + | **type**: str + | **default**: \\n + | **choices**: \\n, \\r, \\r\\n + + + auto_reload + Whether to reload a template file when it has changed after the task has started. + + | **required**: False + | **type**: bool + + + Examples @@ -595,6 +722,79 @@ dest | **type**: str | **sample**: SAMPLE.SEQ.DATA.SET +dest_created + Indicates whether the module created the destination. + + | **returned**: success and if dest was created by the module. + | **type**: bool + | **sample**: + + .. code-block:: json + + true + +destination_attributes + Attributes of a dest created by the module. + + | **returned**: success and destination was created by the module. + | **type**: dict + | **sample**: + + .. code-block:: json + + { + "block_size": 32760, + "record_format": "FB", + "record_length": 45, + "space_primary": 2, + "space_secondary": 1, + "space_type": "K", + "type": "PDSE" + } + + block_size + Block size of the dataset. + + | **type**: int + | **sample**: 32760 + + record_format + Record format of the dataset. + + | **type**: str + | **sample**: FB + + record_length + Record length of the dataset. + + | **type**: int + | **sample**: 45 + + space_primary + Allocated primary space for the dataset. + + | **type**: int + | **sample**: 2 + + space_secondary + Allocated secondary space for the dataset. + + | **type**: int + | **sample**: 1 + + space_type + Unit of measurement for space. 
+ + | **type**: str + | **sample**: K + + type + Type of dataset allocated. + + | **type**: str + | **sample**: PDSE + + checksum SHA256 checksum of the file after running zos_copy. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 4375564bb..9714f2766 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -140,6 +140,133 @@ encoding +use_template + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + + Only valid when ``src`` is a local file or directory. + + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + + | **required**: False + | **type**: bool + + +template_parameters + Options to set the way Jinja2 will process templates. + + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + + These options are ignored unless ``use_template`` is true. + + | **required**: False + | **type**: dict + + + variable_start_string + Marker for the beginning of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: {{ + + + variable_end_string + Marker for the end of a statement to print a variable in Jinja2. 
+ + | **required**: False + | **type**: str + | **default**: }} + + + block_start_string + Marker for the beginning of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: {% + + + block_end_string + Marker for the end of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: %} + + + comment_start_string + Marker for the beginning of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: {# + + + comment_end_string + Marker for the end of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: #} + + + line_statement_prefix + Prefix used by Jinja2 to identify line-based statements. + + | **required**: False + | **type**: str + + + line_comment_prefix + Prefix used by Jinja2 to identify comment lines. + + | **required**: False + | **type**: str + + + lstrip_blocks + Whether Jinja2 should strip leading spaces from the start of a line to a block. + + | **required**: False + | **type**: bool + + + trim_blocks + Whether Jinja2 should remove the first newline after a block is removed. + + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + + | **required**: False + | **type**: bool + | **default**: True + + + keep_trailing_newline + Whether Jinja2 should keep the first trailing newline at the end of a template after rendering. + + | **required**: False + | **type**: bool + + + newline_sequence + Sequence that starts a newline in a template. + + | **required**: False + | **type**: str + | **default**: \\n + | **choices**: \\n, \\r, \\r\\n + + + auto_reload + Whether to reload a template file when it has changed after the task has started. 
+ + | **required**: False + | **type**: bool + + + Examples diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index b98432aa6..b557e8605 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -16,6 +16,7 @@ import os import stat import time +import shutil from tempfile import mkstemp, gettempprefix @@ -34,6 +35,8 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template + display = Display() @@ -112,11 +115,18 @@ def run(self, tmp=None, task_vars=None): msg = "Backup file provided but 'backup' parameter is False" return self._exit_action(result, msg, failed=True) + use_template = _process_boolean(task_args.get("use_template"), default=False) + if remote_src and use_template: + msg = "Use of Jinja2 templates is only valid for local files, remote_src cannot be set to true." + return self._exit_action(result, msg, failed=True) + if not is_uss: if mode or owner or group: msg = "Cannot specify 'mode', 'owner' or 'group' for MVS destination" return self._exit_action(result, msg, failed=True) + template_dir = None + if not remote_src: if local_follow and not src: msg = "No path given for local symlink" @@ -150,14 +160,65 @@ def run(self, tmp=None, task_vars=None): dict(src=src, dest=dest, changed=False, failed=True) ) return result + + if use_template: + template_parameters = task_args.get("template_parameters", dict()) + if encoding: + template_encoding = encoding.get("from", None) + else: + template_encoding = None + + try: + renderer = template.create_template_environment( + template_parameters, + src, + template_encoding + ) + template_dir, rendered_dir = renderer.render_dir_template( + task_vars.get("vars", dict()) + ) + except Exception as err: + if template_dir: + shutil.rmtree(template_dir, ignore_errors=True) + return self._exit_action(result, str(err), failed=True) + + src = rendered_dir + task_args["size"] = sum( - 
os.stat(path + "/" + f).st_size for f in files + os.stat(os.path.join(path, f)).st_size + for path, dirs, files in os.walk(src) + for f in files ) else: if mode == "preserve": task_args["mode"] = "0{0:o}".format( stat.S_IMODE(os.stat(src).st_mode) ) + + if use_template: + template_parameters = task_args.get("template_parameters", dict()) + if encoding: + template_encoding = encoding.get("from", None) + else: + template_encoding = None + + try: + renderer = template.create_template_environment( + template_parameters, + src, + template_encoding + ) + template_dir, rendered_file = renderer.render_file_template( + os.path.basename(src), + task_vars.get("vars", dict()) + ) + except Exception as err: + if template_dir: + shutil.rmtree(template_dir, ignore_errors=True) + return self._exit_action(result, str(err), failed=True) + + src = rendered_file + task_args["size"] = os.stat(src).st_size display.vvv(u"ibm_zos_copy calculated size: {0}".format(os.stat(src).st_size), host=self._play_context.remote_addr) transfer_res = self._copy_to_remote( @@ -187,6 +248,10 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars, ) + # Erasing all rendered Jinja2 templates from the controller. 
+ if template_dir: + shutil.rmtree(template_dir, ignore_errors=True) + if copy_res.get("note") and not force: result["note"] = copy_res.get("note") return result diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 7247f6b7b..715ce57ed 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -17,8 +17,11 @@ from ansible.errors import AnsibleError, AnsibleFileNotFound # from ansible.module_utils._text import to_bytes, to_text from ansible.module_utils.common.text.converters import to_bytes, to_text +from ansible.module_utils.parsing.convert_bool import boolean import os +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template + class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): @@ -32,6 +35,17 @@ def run(self, tmp=None, task_vars=None): return result module_args = self._task.args.copy() + + use_template = _process_boolean(module_args.get("use_template")) + location = module_args.get("location") + if use_template and location != "LOCAL": + result.update(dict( + failed=True, + changed=False, + msg="Use of Jinja2 templates is only valid for local files. 
Location is set to '{0}' but should be 'LOCAL'".format(location) + )) + return result + if module_args["location"] == "LOCAL": source = self._task.args.get("src", None) @@ -94,6 +108,30 @@ def run(self, tmp=None, task_vars=None): tmp_src = self._connection._shell.join_path(tmp, "source") + rendered_file = None + if use_template: + template_parameters = module_args.get("template_parameters", dict()) + encoding = module_args.get("encoding", dict()) + + try: + renderer = template.create_template_environment( + template_parameters, + source_full, + encoding.get("from", None) + ) + template_dir, rendered_file = renderer.render_file_template( + os.path.basename(source_full), + task_vars + ) + except Exception as err: + result["msg"] = to_text(err) + result["failed"] = True + result["changed"] = False + result["invocation"] = dict(module_args=module_args) + return result + + source_full = rendered_file + remote_path = None remote_path = self._transfer_file(source_full, tmp_src) @@ -127,6 +165,10 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars, ) ) + + if rendered_file: + os.remove(rendered_file) + else: result.update( self._execute_module( @@ -153,3 +195,10 @@ def delete_dict_entries(entries, dictionary): # entries = ('checksum', 'dest', 'gid', 'group', 'md5sum', 'mode', 'owner', 'size', 'src', 'state', 'uid') # delete_dict_entries(entries, result) return result + + +def _process_boolean(arg, default=False): + try: + return boolean(arg) + except TypeError: + return default diff --git a/plugins/doc_fragments/template.py b/plugins/doc_fragments/template.py new file mode 100644 index 000000000..1eea4ad3d --- /dev/null +++ b/plugins/doc_fragments/template.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: + use_template: + description: + - Whether the module should treat C(src) as a Jinja2 template and + render it before continuing with the rest of the module. + - Only valid when C(src) is a local file or directory. + - All variables defined in inventory files, vars files and the playbook + will be passed to the template engine, + as well as L(Ansible special variables,https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables), + such as C(playbook_dir), C(ansible_version), etc. + - If variables defined in different scopes share the same name, Ansible will + apply variable precedence to them. You can see the complete precedence order + L(in Ansible's documentation,https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence) + type: bool + default: false + template_parameters: + description: + - Options to set the way Jinja2 will process templates. + - Jinja2 already sets defaults for the markers it uses, you can find more + information at its L(official documentation,https://jinja.palletsprojects.com/en/latest/templates/). + - These options are ignored unless C(use_template) is true. + required: false + type: dict + suboptions: + variable_start_string: + description: + - Marker for the beginning of a statement to print a variable in Jinja2. 
+ type: str + default: '{{' + variable_end_string: + description: + - Marker for the end of a statement to print a variable in Jinja2. + type: str + default: '}}' + block_start_string: + description: + - Marker for the beginning of a block in Jinja2. + type: str + default: '{%' + block_end_string: + description: + - Marker for the end of a block in Jinja2. + type: str + default: '%}' + comment_start_string: + description: + - Marker for the beginning of a comment in Jinja2. + type: str + default: '{#' + comment_end_string: + description: + - Marker for the end of a comment in Jinja2. + type: str + default: '#}' + line_statement_prefix: + description: + - Prefix used by Jinja2 to identify line-based statements. + type: str + required: false + line_comment_prefix: + description: + - Prefix used by Jinja2 to identify comment lines. + type: str + required: false + lstrip_blocks: + description: + - Whether Jinja2 should strip leading spaces from the start of a line + to a block. + type: bool + default: false + trim_blocks: + description: + - Whether Jinja2 should remove the first newline after a block is removed. + - Setting this option to C(False) will result in newlines being added to + the rendered template. This could create invalid code when working with + JCL templates or empty records in destination data sets. + type: bool + default: true + keep_trailing_newline: + description: + - Whether Jinja2 should keep the first trailing newline at the end of a + template after rendering. + type: bool + default: false + newline_sequence: + description: + - Sequence that starts a newline in a template. + type: str + default: "\n" + choices: + - "\n" + - "\r" + - "\r\n" + auto_reload: + description: + - Whether to reload a template file when it has changed after the task + has started. 
+ type: bool + default: false +''' diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py new file mode 100644 index 000000000..3f0c95021 --- /dev/null +++ b/plugins/module_utils/template.py @@ -0,0 +1,336 @@ +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import os +import tempfile +from os import path + +from ansible.module_utils._text import to_native +from ansible.module_utils.parsing.convert_bool import boolean + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + MissingImport, +) + +# This module is to be used locally, so jinja2 only needs to be installed in the +# controller, but Ansible sanity testing simulates what would happen if a managed +# node tried to use this module_util, hence the use of MissingImport. +try: + import jinja2 +except Exception: + jinja2 = MissingImport("jinja2") + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode + + +def _process_boolean(arg, default=False): + try: + return boolean(arg) + except TypeError: + return default + + +def create_template_environment(template_parameters, src, template_encoding=None): + """Parses boolean parameters for Jinja2 and returns a TemplateRenderer + instance. + + Arguments: + template_parametrs (dict): Parameters for creating the template environment. 
+ src (str): Local path where the templates are located. + template_encoding (dict, optional): encoding used by the templates. If not + given, the default locale set in the system will be used. + + Returns: + TemplateRenderer -- Object with a new template environment ready to + render the templates found in src. + """ + if template_parameters.get("lstrip_blocks"): + template_parameters["lstrip_blocks"] = _process_boolean(template_parameters.get("lstrip_blocks"), default=False) + if template_parameters.get("trim_blocks"): + template_parameters["trim_blocks"] = _process_boolean(template_parameters.get("trim_blocks"), default=True) + if template_parameters.get("keep_trailing_newline"): + template_parameters["keep_trailing_newline"] = _process_boolean(template_parameters.get("keep_trailing_newline"), default=False) + if template_parameters.get("auto_reload"): + template_parameters["auto_reload"] = _process_boolean(template_parameters.get("auto_reload"), default=False) + + if not template_encoding: + template_encoding = encode.Defaults.get_default_system_charset() + + return TemplateRenderer(src, template_encoding, **template_parameters) + + +class TemplateRenderer: + """This class implements functionality to load and render Jinja2 + templates. To add support for Jinja2 in a module, you need to include + the template.py doc fragment, add the options for configuring the Jinja2 + environment to the module's options, and instantiate this class to + render templates inside an action plugin. 
+ """ + + _ALLOWED_NEWLINE_DELIMITERS = ["\n", "\r", "\r\n"] + _FIXABLE_NEWLINE_DELIMITERS = ["\\n", "\\r", "\\r\\n"] + _NEWLINE_DELIMITER_SWAP = { + "\\n": "\n", + "\\r": "\r", + "\\r\\n": "\r\n" + } + + def __init__( + self, + template_path, + encoding, + variable_start_string="{{", + variable_end_string="}}", + block_start_string="{%", + block_end_string="%}", + comment_start_string="{#", + comment_end_string="#}", + line_statement_prefix=None, + line_comment_prefix=None, + lstrip_blocks=False, + trim_blocks=True, + keep_trailing_newline=False, + newline_sequence="\n", + auto_reload=False, + ): + """Initializes a new TemplateRenderer object with a Jinja2 + environment that can use templates from a given directory. + More information about Jinja2 templates and environments can + be found at https://jinja.palletsprojects.com/en/3.0.x/api/. + + Arguments: + template_path (str): Path to a Jinja2 template file or directory. + encoding (str): Encoding for rendered templates. + variable_start_string (str, optional): Marker for the beginning of + a statement to print a variable in Jinja2. + variable_end_string (str, optional): Marker for the end of + a statement to print a variable in Jinja2. + block_start_string (str, optional): Marker for the beginning of + a block in Jinja2. + block_end_string (str, optional): Marker for the end of a block + in Jinja2. + comment_start_string (str, optional): Marker for the beginning of + a comment in Jinja2. + comment_end_string (str, optional): Marker for the end of a comment + in Jinja2. + line_statement_prefix (str, optional): Prefix used by Jinja2 to identify + line-based statements. + line_comment_prefix (str, optional): Prefix used by Jinja2 to identify + comment lines. + lstrip_blocks (bool, optional): Whether Jinja2 should strip leading spaces + from the start of a line to a block. + trim_blocks (bool, optional): Whether Jinja2 should remove the first + newline after a block is removed. 
+ keep_trailing_newline (bool, optional): Whether Jinja2 should keep the + first trailing newline at the end of a template after rendering. + newline_sequence (str, optional): Sequence that starts a newline in a + template. Valid values are '\n', '\r', '\r\n'. + auto_reload (bool, optional): Whether to reload a template file when it + has changed after creating the Jinja2 environment. + + Raises: + FileNotFoundError: When template_path points to a non-existent + file or directory. + ValueError: When the newline sequence is not valid. + """ + if not path.exists(template_path): + raise FileNotFoundError("The template path {0} does not exist".format( + template_path + )) + + template_canonical_path = path.realpath(template_path) + if path.isdir(template_canonical_path): + template_dir = template_canonical_path + else: + template_dir = path.dirname(template_canonical_path) + + if newline_sequence in self._FIXABLE_NEWLINE_DELIMITERS: + newline_sequence = self._NEWLINE_DELIMITER_SWAP[newline_sequence] + + if newline_sequence not in self._ALLOWED_NEWLINE_DELIMITERS: + raise ValueError("Newline delimiter '{0}' is not valid".format( + to_native(newline_sequence) + )) + + self.encoding = encoding + self.template_dir = template_dir + self.templating_env = jinja2.Environment( + block_start_string=block_start_string, + block_end_string=block_end_string, + variable_start_string=variable_start_string, + variable_end_string=variable_end_string, + comment_start_string=comment_start_string, + comment_end_string=comment_end_string, + line_statement_prefix=line_statement_prefix, + line_comment_prefix=line_comment_prefix, + trim_blocks=trim_blocks, + lstrip_blocks=lstrip_blocks, + newline_sequence=newline_sequence, + keep_trailing_newline=keep_trailing_newline, + loader=jinja2.FileSystemLoader( + searchpath=template_dir, + encoding=encoding, + ), + auto_reload=auto_reload, + autoescape=True, + ) + + def render_file_template(self, file_path, variables): + """Loads a template from the 
templates directory and renders + it using the Jinja2 environment configured in the object. + + Arguments: + file_path (str): Relative path (from the template directory) + to a template. + variables (dict): Dictionary containing the variables and + their values that will be substituted in the template. + + Returns: + tuple -- Filepath to a temporary directory that contains the + rendered template, and the complete filepath to the + rendered template. + + Raises: + TemplateNotFound: When the template file doesn't exist in the + template directory. + TemplateError: When rendering of the template fails. + FileExistsError: When there is an error while trying to create the + temp directory for rendered templates. + PermissionError: When there is an error accessing the temp directory. + IOError: When there is an error writing the rendered template. + ValueError: When there is an error writing the rendered template. + """ + try: + template = self.templating_env.get_template(file_path) + rendered_contents = template.render(variables) + except jinja2.TemplateNotFound as err: + raise jinja2.TemplateNotFound("Template {0} was not found: {1}".format( + file_path, + to_native(err) + )) + except jinja2.TemplateError as err: + raise jinja2.TemplateError("Error while rendering {0}: {1}".format( + file_path, + to_native(err) + )) + + try: + temp_template_dir = tempfile.mkdtemp() + except FileExistsError as err: + raise FileExistsError("Unable to create directory for rendered templates: {0}".format( + to_native(err) + )) + except PermissionError as err: + raise PermissionError("Error while trying to access temporary directory for templates: {0}".format( + to_native(err) + )) + + try: + template_file_path = path.join(temp_template_dir, file_path) + with open(template_file_path, mode="w", encoding=self.encoding) as template: + template.write(rendered_contents) + # There could be encoding errors. 
+ except IOError as err: + raise IOError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + except ValueError as err: + raise ValueError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + + return temp_template_dir, template_file_path + + def render_dir_template(self, variables): + """Loads all templates from a directory and renders + them using the Jinja2 environment configured in the object. + + Arguments: + variables (dict): Dictionary containing the variables and + their values that will be substituted in the template. + + Returns: + tuple -- Filepath to a temporary directory that contains the + rendered templates, and the complete filepath to the + rendered templates' directory. + + Raises: + TemplateNotFound: When the template file doesn't exist in the + template directory. + TemplateError: When rendering of the template fails. + FileExistsError: When there is an error while trying to create the + temp directory for rendered templates. + PermissionError: When there is an error accessing the temp directory. + OSError: When there is an error while trying to create the + temp directory for rendered templates. + IOError: When there is an error writing the rendered template. + ValueError: When there is an error writing the rendered template. 
+ """ + try: + temp_parent_dir = tempfile.mkdtemp() + last_dir = os.path.basename(self.template_dir) + temp_template_dir = os.path.join(temp_parent_dir, last_dir) + os.makedirs(temp_template_dir, exist_ok=True) + except FileExistsError as err: + raise FileExistsError("Unable to create directory for rendered templates: {0}".format( + to_native(err) + )) + except PermissionError as err: + raise PermissionError("Error while trying to access temporary directory: {0}".format( + to_native(err) + )) + except OSError as err: + raise OSError("Error while trying to access temporary directory: {0}".format( + to_native(err) + )) + + for path, subdirs, files in os.walk(self.template_dir): + for template_file in files: + relative_dir = os.path.relpath(path, self.template_dir) + file_path = os.path.normpath(os.path.join(relative_dir, template_file)) + + try: + template = self.templating_env.get_template(file_path) + rendered_contents = template.render(variables) + except jinja2.TemplateNotFound as err: + raise jinja2.TemplateNotFound("Template {0} was not found: {1}".format( + file_path, + to_native(err) + )) + except jinja2.TemplateError as err: + raise jinja2.TemplateError("Error while rendering {0}: {1}".format( + file_path, + to_native(err) + )) + + try: + template_file_path = os.path.join(temp_template_dir, file_path) + os.makedirs(os.path.dirname(template_file_path), exist_ok=True) + with open(template_file_path, mode="w", encoding=self.encoding) as temp: + temp.write(rendered_contents) + except IOError as err: + raise IOError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + except ValueError as err: + raise ValueError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + + return temp_parent_dir, temp_template_dir diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6a3be27b8..0998f2a0e 100644 --- a/plugins/modules/zos_copy.py 
+++ b/plugins/modules/zos_copy.py @@ -364,6 +364,9 @@ type: str required: false +extends_documentation_fragment: + - ibm.ibm_zos_core.template + notes: - Destination data sets are assumed to be in catalog. When trying to copy to an uncataloged data set, the module assumes that the data set does @@ -2688,8 +2691,8 @@ def run_module(module, arg_def): try: if encoding: # 'conv_path' points to the converted src file or directory - if is_mvs_dest: - encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET + # if is_mvs_dest: + # encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET conv_path = copy_handler.convert_encoding(src, temp_path, encoding) @@ -2869,6 +2872,30 @@ def main(): sms_management_class=dict(type="str", required=False), ) ), + use_template=dict(type='bool', default=False), + template_parameters=dict( + type='dict', + required=False, + options=dict( + variable_start_string=dict(type='str', default='{{'), + variable_end_string=dict(type='str', default='}}'), + block_start_string=dict(type='str', default='{%'), + block_end_string=dict(type='str', default='%}'), + comment_start_string=dict(type='str', default='{#'), + comment_end_string=dict(type='str', default='#}'), + line_statement_prefix=dict(type='str', required=False), + line_comment_prefix=dict(type='str', required=False), + lstrip_blocks=dict(type='bool', default=False), + trim_blocks=dict(type='bool', default=True), + keep_trailing_newline=dict(type='bool', default=False), + newline_sequence=dict( + type='str', + default='\n', + choices=['\n', '\r', '\r\n'] + ), + auto_reload=dict(type='bool', default=False), + ) + ), is_uss=dict(type='bool'), is_pds=dict(type='bool'), is_src_dir=dict(type='bool'), @@ -2918,6 +2945,27 @@ def main(): sms_management_class=dict(arg_type="str", required=False), ) ), + + use_template=dict(arg_type='bool', required=False), + template_parameters=dict( + arg_type='dict', + required=False, + options=dict( + variable_start_string=dict(arg_type='str', 
required=False), + variable_end_string=dict(arg_type='str', required=False), + block_start_string=dict(arg_type='str', required=False), + block_end_string=dict(arg_type='str', required=False), + comment_start_string=dict(arg_type='str', required=False), + comment_end_string=dict(arg_type='str', required=False), + line_statement_prefix=dict(arg_type='str', required=False), + line_comment_prefix=dict(arg_type='str', required=False), + lstrip_blocks=dict(arg_type='bool', required=False), + trim_blocks=dict(arg_type='bool', required=False), + keep_trailing_newline=dict(arg_type='bool', required=False), + newline_sequence=dict(arg_type='str', required=False), + auto_reload=dict(arg_type='bool', required=False), + ) + ), ) if ( diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 97cbbc4a7..bc9f8ff19 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -130,6 +130,10 @@ required: false type: str default: IBM-1047 + +extends_documentation_fragment: + - ibm.ibm_zos_core.template + notes: - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). 
@@ -796,6 +800,30 @@ def run_module(): wait_time_s=dict(type="int", default=10), max_rc=dict(type="int", required=False), temp_file=dict(type="path", required=False), + use_template=dict(type='bool', default=False), + template_parameters=dict( + type='dict', + required=False, + options=dict( + variable_start_string=dict(type='str', default='{{'), + variable_end_string=dict(type='str', default='}}'), + block_start_string=dict(type='str', default='{%'), + block_end_string=dict(type='str', default='%}'), + comment_start_string=dict(type='str', default='{#'), + comment_end_string=dict(type='str', default='#}'), + line_statement_prefix=dict(type='str', required=False), + line_comment_prefix=dict(type='str', required=False), + lstrip_blocks=dict(type='bool', default=False), + trim_blocks=dict(type='bool', default=True), + keep_trailing_newline=dict(type='bool', default=False), + newline_sequence=dict( + type='str', + default='\n', + choices=['\n', '\r', '\r\n'] + ), + auto_reload=dict(type='bool', default=False), + ) + ), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index db6262bc9..5604527a3 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -55,6 +55,38 @@ 00000003A record """ +TEMPLATE_CONTENT = """ +This is a Jinja2 test: {{ var }} + +{# This is a comment. #} + +If: +{% if if_var is divisibleby 5 %} +Condition is true. +{% endif %} + +Inside a loop: +{% for i in array %} +Current element: {{ i }} +{% endfor %} +""" + +TEMPLATE_CONTENT_NON_DEFAULT_MARKERS = """ +This is a Jinja2 test: (( var )) + +#% This is a comment. %# + +If: +{% if if_var is divisibleby 5 %} +Condition is true. 
+{% endif %} + +Inside a loop: +{% for i in array %} +Current element: (( i )) +{% endfor %} +""" + # SHELL_EXECUTABLE = "/usr/lpp/rsusr/ported/bin/bash" SHELL_EXECUTABLE = "/bin/sh" TEST_PS = "IMSTESTL.IMS01.DDCHKPT" @@ -148,6 +180,16 @@ def populate_dir(dir_path): infile.write(DUMMY_DATA) +def create_template_file(dir_path, use_default_markers=True, encoding="utf-8"): + content = TEMPLATE_CONTENT if use_default_markers else TEMPLATE_CONTENT_NON_DEFAULT_MARKERS + template_path = os.path.join(dir_path, "template") + + with open(template_path, "w", encoding=encoding) as infile: + infile.write(content) + + return template_path + + def populate_dir_crlf_endings(dir_path): for i in range(5): with open(os.path.join(dir_path, "file{0}".format(i)), "wb") as infile: @@ -1190,6 +1232,264 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): @pytest.mark.uss +@pytest.mark.template +@pytest.mark.parametrize("encoding", ["utf-8", "iso8859-1"]) +def test_copy_template_file(ansible_zos_module, encoding): + hosts = ansible_zos_module + dest_path = "/tmp/new_dir" + temp_dir = tempfile.mkdtemp() + + try: + temp_template = create_template_file( + temp_dir, + use_default_markers=True, + encoding=encoding + ) + dest_template = os.path.join(dest_path, os.path.basename(temp_template)) + + hosts.all.file(path=dest_path, state="directory") + + # Adding the template vars to each host. 
+ template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_template, + dest=dest_path, + use_template=True, + encoding={ + "from": encoding, + "to": "IBM-1047" + } + ) + + verify_copy = hosts.all.shell( + cmd="cat {0}".format(dest_template), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_template + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.file(path=dest_path, state="absent") + shutil.rmtree(temp_dir) + + +@pytest.mark.uss +@pytest.mark.template +def test_copy_template_dir(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/new_dir" + + # Ensuring there's a traling slash to copy the contents of the directory. 
+ temp_dir = os.path.normpath(tempfile.mkdtemp()) + temp_dir = "{0}/".format(temp_dir) + + temp_subdir_a = os.path.join(temp_dir, "subdir_a") + temp_subdir_b = os.path.join(temp_dir, "subdir_b") + os.makedirs(temp_subdir_a) + os.makedirs(temp_subdir_b) + + try: + temp_template_a = create_template_file(temp_subdir_a, use_default_markers=True) + temp_template_b = create_template_file(temp_subdir_b, use_default_markers=True) + dest_template_a = os.path.join( + dest_path, + "subdir_a", + os.path.basename(temp_template_a) + ) + dest_template_b = os.path.join( + dest_path, + "subdir_b", + os.path.basename(temp_template_b) + ) + + hosts.all.file(path=dest_path, state="directory") + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_dir, + dest=dest_path, + use_template=True, + force=True + ) + + verify_copy_a = hosts.all.shell( + cmd="cat {0}".format(dest_template_a), + executable=SHELL_EXECUTABLE, + ) + verify_copy_b = hosts.all.shell( + cmd="cat {0}".format(dest_template_b), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_path + for v_cp in verify_copy_a.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." 
in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + for v_cp in verify_copy_b.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.file(path=dest_path, state="absent") + shutil.rmtree(temp_dir) + + +@pytest.mark.uss +@pytest.mark.template +def test_copy_template_file_with_non_default_markers(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/new_dir" + temp_dir = tempfile.mkdtemp() + + try: + temp_template = create_template_file(temp_dir, use_default_markers=False) + dest_template = os.path.join(dest_path, os.path.basename(temp_template)) + + hosts.all.file(path=dest_path, state="directory") + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_template, + dest=dest_path, + use_template=True, + template_parameters=dict( + variable_start_string="((", + variable_end_string="))", + comment_start_string="#%", + comment_end_string="%#" + ) + ) + + verify_copy = hosts.all.shell( + cmd="cat {0}".format(dest_template), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_template + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. 
+ assert "((" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "#%" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.file(path=dest_path, state="absent") + shutil.rmtree(temp_dir) + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.template +def test_copy_template_file_to_dataset(ansible_zos_module): + hosts = ansible_zos_module + dest_dataset = "USER.TEST.TEMPLATE" + temp_dir = tempfile.mkdtemp() + + try: + temp_template = create_template_file(temp_dir, use_default_markers=True) + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_template, + dest=dest_dataset, + use_template=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest_dataset), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_dataset + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." 
in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.zos_data_set(name=dest_dataset, state="absent") + shutil.rmtree(temp_dir) + + @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_remote=False), dict(src="/etc/profile", is_remote=True),]) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 3364d12da..b7b1ec5f0 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -19,10 +19,10 @@ import tempfile import pytest import re +import os from pprint import pprint - # ############################################################################## # Configure the job card as needed, most common keyword parameters: # CLASS: Used to achieve a balance between different types of jobs and avoid @@ -118,9 +118,49 @@ /* """ +JCL_TEMPLATES = { + "Default": """//{{ pgm_name }} JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +{# This comment should not be part of the JCL #} +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD {{ input_dataset }} +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +{{ message }} +/* +//SYSUT2 DD SYSOUT=* +// +""", + + "Custom": """//(( pgm_name )) JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +//STEP0001 EXEC PGM=IEBGENER +(# This comment should not be part of the JCL #) +//SYSIN DD (( input_dataset )) +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +(( message )) +/* +//SYSUT2 DD SYSOUT=* +// +""", + + "Loop": """//JINJA JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +//STEP0001 EXEC PGM=IEFBR14 +{% for item in steps %} +//SYS{{ item.step_name }} DD {{ item.dd }} +{% endfor %} +Hello, world! 
+/* +//SYSUT2 DD SYSOUT=* +// +""" +} + JCL_FILE_CONTENTS_NO_DSN = """//* //****************************************************************************** -//* Job containing a non existent DSN that will force an error. +//* Job containing a non existent DSN that will force an error. //* Returns: //* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) //* msg --> The JCL submitted with job id JOB00532 but there was an error, @@ -152,11 +192,10 @@ # // # """ - JCL_FILE_CONTENTS_JCL_ERROR_INT = """//* //****************************************************************************** //* Another job containing no job card resulting in a JCLERROR with an value. It -//* won't always be 952, it will increment. +//* won't always be 952, it will increment. //* Returns: //* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) //* msg --> The JCL submitted with job id JOB00728 but there was an error, @@ -192,7 +231,6 @@ // """ - JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* //****************************************************************************** //* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and @@ -216,7 +254,6 @@ // """ - TEMP_PATH = "/tmp/jcl" DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -513,6 +550,68 @@ def test_job_submit_max_rc(ansible_zos_module, args): hosts.all.file(path=tmp_file.name, state="absent") +@pytest.mark.template +@pytest.mark.parametrize("args", [ + dict( + template="Default", + options=dict( + keep_trailing_newline=False + ) + ), + dict( + template="Custom", + options=dict( + keep_trailing_newline=False, + variable_start_string="((", + variable_end_string="))", + comment_start_string="(#", + comment_end_string="#)" + ) + ), + dict( + template="Loop", + options=dict( + keep_trailing_newline=False + ) + ) +]) +def test_job_submit_jinja_template(ansible_zos_module, args): + try: + hosts = ansible_zos_module + + tmp_file = 
tempfile.NamedTemporaryFile(delete=False) + with open(tmp_file.name, "w") as f: + f.write(JCL_TEMPLATES[args["template"]]) + + template_vars = dict( + pgm_name="HELLO", + input_dataset="DUMMY", + message="Hello, world", + steps=[ + dict(step_name="IN", dd="DUMMY"), + dict(step_name="PRINT", dd="SYSOUT=*"), + dict(step_name="UT1", dd="*") + ] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + results = hosts.all.zos_job_submit( + src=tmp_file.name, + location="LOCAL", + use_template=True, + template_parameters=args["options"] + ) + + for result in results.contacted.values(): + assert result.get('changed') is True + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + finally: + os.remove(tmp_file.name) + + def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: @@ -541,6 +640,7 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): assert result.get("jobs")[0].get("job_id") is not None assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: diff --git a/tests/pytest.ini b/tests/pytest.ini index c395f66f7..a9324aaae 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -7,4 +7,5 @@ markers = uss: uss test cases. seq: sequential data sets test cases. pdse: partitioned data sets test cases. - vsam: VSAM data sets test cases. \ No newline at end of file + vsam: VSAM data sets test cases. + template: Jinja2 templating test cases. 
\ No newline at end of file From fc84aa486a59468c6ec185f9ae5ae3ec0f8b285d Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 17 Jul 2023 20:22:44 -0600 Subject: [PATCH 140/413] Module zos_unarchive & zos_archive (#755) * Initial commit * Created template for zos_unarchive * Initial boiler plate for unarchive * Added unarchive handler selection * Added file/data set search * Adding AMATERSE unpacking * Added support for terse unarchive * Remove zos_archive module from the branch * Adding RECEIVE/XMIT support * Added temporary dataset removal * Adding RECEIVE as an mvscmd * Add RECEIVE using mvs_cmd * Add unpacked datasets display * Added display of unpacked targets support for multiple data sets * Added alias to options * Added include/ exclude options * Added include for xmit and terse * Modified include for all * Adding volume selection for SMS managed * Added list support for MVS archives * Removed unused var * Add force support for mvs data sets * Add archive listing for tar, bz2, and gz * Add unarchive all for tar, gz and bz2 * Added include/exclude support for tar, gz, bz2 * Add mvs_cmd amaterse * Modify mvs_cmd call * Add archive.py * Add latest zos_archive * Refactor mvs_cmd * Remove comments * Adding tests for zos_archive * Added unzip * Added arcroot logic and tarfile * Added changed logic * lastest changes * Multiple changes to zos_archive and zos_unarchive and its tests * Added support for pax * added list tests * Created action plugin for zos_unarchive * Add support for remove * Adding tests for exclusion list USS * Added tests * Add dest_data_set info * Adding multiple test cases and updated docs * Added test for mode * Removed unused stuff * Modified XMIT command * Added expanded paths support for mvs data sets * Added ugly multiple data set tests * Added various new tests for mvs data sets * Added new tests and default dest data set * Added default dest data set * Fixed pep8 issues * Added docs * Added docs * 
Fixed various sanity issues * removed abc.ABC * Added filters * modified fdict * polish test cases * Added tests * Added record length change * Fixed record length error * fix pylint issue * Add env cleaning in when terse or xmit fails * Moved locking tests below * Added tests for multiple data set in unarchive and modified test archive and unarchive names * Added tests for zos_unarchive * Added replace into zos_data_set invocation * Added replace * Added docs * Added * Added docs * Cleaned docs * Added permission change in zos_unarchive * Added mode support for USS files * Added is_binary option * Added zos_archive and zos_unarchive rst * Updated docs * Updated docs * Updated docs * Removed debug statements * fix pylint issues * Added get restored data sets * Removed replace flag * Added lock process removal * Removed print statements and blank lines * Removed print statements * Removed unused imports * added missing test * Completed uncompleted doc string * Fixed a bug that removed the source when not needed * Fixed pep8 issue * Added removal in correct place * fixed a bug that left behind tmp data sets * Added changes for deleting tmp data sets + pr code review comments * Added a notes and seealso section in the docs * Changed name to xmit_log_data_set * Added comments * Added comments about format * Added more description in terse_pack * Added mode description * Added description for tmp_hlq * Added description for remove * Added 'a' into some statements * Modified dest_state description * Updated option descriptions * Changed badzip error msg * changed adrdssu multiple data set error message * Added tso xmit error message * changed adrdssu multiple data set error message * modified description msg * Updated path to src and changed multiple docs * Fixed module doc issues * Removed unused error and modified error messages * Changed parameter result get * Updated docs in zos_unarchive * Updates docs * Updated docs * Modified mode * Changed src in action * Updated 
* Updated expanded sources
docs/source/modules/zos_unarchive.rst create mode 100644 plugins/action/zos_unarchive.py create mode 100644 plugins/modules/zos_archive.py create mode 100644 plugins/modules/zos_unarchive.py create mode 100644 tests/functional/modules/test_zos_archive_func.py create mode 100644 tests/functional/modules/test_zos_unarchive_func.py diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index f5bc9325f..3e1763091 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -20,6 +20,7 @@ body: description: Select which modules are being reported in this enhancement or feature. You can select more than one. multiple: true options: + - zos_archive - zos_apf - zos_backup_restore - zos_blockinfile @@ -39,6 +40,7 @@ body: - zos_operator_action_query - zos_ping - zos_tso_command + - zos_unarchive validations: required: true - type: textarea diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst new file mode 100644 index 000000000..bb4383f74 --- /dev/null +++ b/docs/source/modules/zos_archive.rst @@ -0,0 +1,463 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_archive.py + +.. _zos_archive_module: + + +zos_archive -- Archive files and data sets on z/OS. +=================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create or extend an archive on a remote z/OS system. +- Sources for archiving must be on the remote z/OS system. +- Supported sources are USS (UNIX System Services) or z/OS data sets. +- The archive remains on the remote z/OS system. +- For supported archive formats, see option ``format``. + + + + + + +Parameters +---------- + + +src + List of names or globs of UNIX System Services (USS) files, PS (sequential data sets), PDS, PDSE to compress or archive. 
When left unspecified, it uses the current group of the current user
Remove any added source files, trees or data sets
+ + This option is only applicable if ``dest`` is USS, otherwise ignored. + + | **required**: False + | **type**: str + + +remove + Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *path*. + + | **required**: False + | **type**: bool + + +dest_data_set + Data set attributes to customize a ``dest`` data set to be archived into. + + | **required**: False + | **type**: dict + + + name + Desired name for destination dataset. + + | **required**: False + | **type**: str + + + type + Organization of the destination + + | **required**: False + | **type**: str + | **default**: SEQ + | **choices**: SEQ + + + space_primary + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_secondary + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_type + If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. + + Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + + | **required**: False + | **type**: str + | **choices**: K, M, G, CYL, TRK + + + record_format + If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + + Choices are case-insensitive. + + | **required**: False + | **type**: str + | **choices**: FB, VB, FBA, VBA, U + + + record_length + The length of each record in the data set, in bytes. + + For variable data sets, the length must include the 4-byte prefix area. + + Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0. 
+ + | **required**: False + | **type**: int + + + block_size + The block size to use for the data set. + + | **required**: False + | **type**: int + + + directory_blocks + The number of directory blocks to allocate to the data set. + + | **required**: False + | **type**: int + + + sms_storage_class + The storage class for an SMS-managed dataset. + + Required for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_data_class + The data class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_management_class + The management class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary data sets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. + + | **required**: False + | **type**: str + + +force + If set to ``true`` and the remote file or data set ``dest`` will be deleted. Otherwise it will be created with the ``dest_data_set`` attributes or default values if ``dest_data_set`` is not specified. + + If set to ``false``, the file or data set will only be copied if the destination does not exist. + + If set to ``false`` and destination exists, the module exits with a note to the user. + + | **required**: False + | **type**: bool + + + + +Examples +-------- + +.. 
exclude: "USER.ARCHIVE.EXCLUDE.*"
+ + | **returned**: always + | **type**: str + +dest_state + The state of the *dest* file or data set. + + ``absent`` when the file does not exist. + + ``archive`` when the file is an archive. + + ``compress`` when the file is compressed, but not an archive. + + ``incomplete`` when the file is an archive, but some files under *path* were not found. + + | **returned**: success + | **type**: str + +missing + Any files or data sets that were missing from the source. + + | **returned**: success + | **type**: list + +archived + Any files or data sets that were compressed or added to the archive. + + | **returned**: success + | **type**: list + +arcroot + If ``src`` is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. + + | **returned**: always + | **type**: str + +expanded_sources + The list of matching paths from the src option. + + | **returned**: always + | **type**: list + +expanded_exclude_sources + The list of matching exclude paths from the exclude option. + + | **returned**: always + | **type**: list + diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst new file mode 100644 index 000000000..ecced2362 --- /dev/null +++ b/docs/source/modules/zos_unarchive.rst @@ -0,0 +1,444 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_unarchive.py + +.. _zos_unarchive_module: + + +zos_unarchive -- Unarchive files and data sets in z/OS. +======================================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- The ``zos_unarchive`` module unpacks an archive after optionally transferring it to the remote system. +- For supported archive formats, see option ``format``. +- Supported sources are USS (UNIX System Services) or z/OS data sets. +- Mixing MVS data sets with USS files for unarchiving is not supported. +- The archive is sent to the remote as binary, so no encoding is performed. 
+ + + + + + +Parameters +---------- + + +src + The remote absolute path or data set of the archive to be uncompressed. + + *src* can be a USS file or MVS data set name. + + USS file paths should be absolute paths. + + MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. + + | **required**: True + | **type**: str + + +format + The compression type and corresponding options to use when archiving data. + + | **required**: True + | **type**: dict + + + name + The compression format to use. + + | **required**: True + | **type**: str + | **choices**: bz2, gz, tar, zip, terse, xmit, pax + + + format_options + Options specific to a compression format. + + | **required**: False + | **type**: dict + + + xmit_log_data_set + Provide the name of a data set to store xmit log output. + + If the data set provided does not exist, the program will create it. + + If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB + + When providing the *xmit_log_data_set* name, ensure there is adequate space. + + | **required**: False + | **type**: str + + + use_adrdssu + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. + + | **required**: False + | **type**: bool + + + dest_volumes + When *use_adrdssu=True*, specify the volume the data sets will be written to. + + If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. + + If the storage administrator has specified a system default unit name and you do not set a volume name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. 
When ``include`` is set, only those files will be extracted, leaving the remaining files in the archive.
+ + Mutually exclusive with exclude. + + | **required**: False + | **type**: list + | **elements**: str + + +exclude + List the directory and file or data set names that you would like to exclude from the unarchive action. + + Mutually exclusive with include. + + | **required**: False + | **type**: list + | **elements**: str + + +list + Will list the contents of the archive without unpacking. + + | **required**: False + | **type**: bool + + +dest_data_set + Data set attributes to customize a ``dest`` data set that the archive will be copied into. + + | **required**: False + | **type**: dict + + + name + Desired name for destination dataset. + + | **required**: False + | **type**: str + + + type + Organization of the destination + + | **required**: False + | **type**: str + | **default**: SEQ + | **choices**: SEQ, PDS, PDSE + + + space_primary + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_secondary + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_type + If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. + + Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + + | **required**: False + | **type**: str + | **choices**: K, M, G, CYL, TRK + + + record_format + If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + + Choices are case-insensitive. + + | **required**: False + | **type**: str + | **choices**: FB, VB, FBA, VBA, U + + + record_length + The length of each record in the data set, in bytes. + + For variable data sets, the length must include the 4-byte prefix area. 
+ + Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0. + + | **required**: False + | **type**: int + + + block_size + The block size to use for the data set. + + | **required**: False + | **type**: int + + + directory_blocks + The number of directory blocks to allocate to the data set. + + | **required**: False + | **type**: int + + + key_offset + The key offset to use when creating a KSDS data set. + + *key_offset* is required when *type=KSDS*. + + *key_offset* should only be provided when *type=KSDS* + + | **required**: False + | **type**: int + + + key_length + The key length to use when creating a KSDS data set. + + *key_length* is required when *type=KSDS*. + + *key_length* should only be provided when *type=KSDS* + + | **required**: False + | **type**: int + + + sms_storage_class + The storage class for an SMS-managed dataset. + + Required for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_data_class + The data class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_management_class + The management class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary data sets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. 
+ + | **required**: False + | **type**: str + + +force + If set to true and the remote file or data set dest exists, the dest will be deleted. + + | **required**: False + | **type**: bool + + +remote_src + If set to true, ``zos_unarchive`` retrieves the archive from the remote system. + + If set to false, ``zos_unarchive`` searches the local machine (Ansible controller) for the archive. + + | **required**: False + | **type**: bool + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + # Simple extract + - name: Copy local tar file and unpack it on the managed z/OS node. + zos_unarchive: + path: "./files/archive_folder_test.tar" + format: + name: tar + + # use include + - name: Unarchive a bzip file selecting only a file to unpack. + zos_unarchive: + path: "/tmp/test.bz2" + format: + name: bz2 + include: + - 'foo.txt' + + # Use exclude + - name: Unarchive a terse data set and excluding data sets from unpacking. + zos_unarchive: + path: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + exclude: + - USER.ARCHIVE.TEST1 + - USER.ARCHIVE.TEST2 + + # List option + - name: List content from XMIT + zos_unarchive: + path: "USER.ARCHIVE.RESULT.XMIT" + format: + name: xmit + format_options: + use_adrdssu: True + list: True + + + + +Notes +----- + +.. note:: + VSAMs are not supported. + + + +See Also +-------- + +.. seealso:: + + - :ref:`zos_unarchive_module` + + + + +Return Values +------------- + + +path + File path or data set name unarchived. + + | **returned**: always + | **type**: str + +dest_path + Destination path where archive was extracted. + + | **returned**: always + | **type**: str + +targets + List of files or data sets in the archive. + + | **returned**: success + | **type**: list + | **elements**: str + +missing + Any files or data sets not found during extraction. 
+ + | **returned**: success + | **type**: str + diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py new file mode 100644 index 000000000..7c310a4a3 --- /dev/null +++ b/plugins/action/zos_unarchive.py @@ -0,0 +1,121 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.plugins.action import ActionBase +from ansible.utils.display import Display +from ansible.module_utils.parsing.convert_bool import boolean +import os +import copy +from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule + + +USS_SUPPORTED_FORMATS = ['tar', 'zip', 'bz2', 'pax', 'gz'] +MVS_SUPPORTED_FORMATS = ['terse', 'xmit'] + +display = Display() + + +def _process_boolean(arg, default=False): + try: + return boolean(arg) + except TypeError: + return default + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + if task_vars is None: + task_vars = dict() + + result = super(ActionModule, self).run(tmp, task_vars) + + if result.get("skipped"): + return result + + module_args = self._task.args.copy() + + if module_args.get("remote_src", False): + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_unarchive", + module_args=module_args, + task_vars=task_vars, + ) + ) + else: + source = module_args.get("src") + force = 
_process_boolean(module_args.get("force")) + format = self._task.args.get("format") + format_name = format.get("name") + copy_module_args = dict() + dest_data_set = format.get("dest_data_set") + dest = "" + if source.startswith('~'): + source = os.path.expanduser(source) + source = os.path.realpath(source) + + if format_name in USS_SUPPORTED_FORMATS: + dest = self._execute_module( + module_name="tempfile", module_args={}, task_vars=task_vars, + ).get("path") + elif format_name in MVS_SUPPORTED_FORMATS: + tmp_hlq = module_args.get("tmp_hlq") if module_args.get("tmp_hlq") is not None else "" + cmd_res = self._execute_module( + module_name="command", + module_args=dict( + _raw_params="mvstmp {0}".format(tmp_hlq) + ), + task_vars=task_vars, + ) + dest = cmd_res.get("stdout") + if dest_data_set is None: + if format_name == 'terse': + dest_data_set = dict(type='SEQ', record_format='FB', record_length=1024) + if format_name == 'xmit': + dest_data_set = dict(type='SEQ', record_format='FB', record_length=80) + else: + # Raise unsupported format name + None + + copy_module_args.update( + dict( + src=source, + dest=dest, + dest_data_set=dest_data_set, + force=force, + is_binary=True, + ) + ) + copy_task = copy.deepcopy(self._task) + copy_task.args = copy_module_args + zos_copy_action_module = ZosCopyActionModule(task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj) + result.update(zos_copy_action_module.run(task_vars=task_vars)) + + module_args["src"] = dest + display.vvv(u"Copy args {0}".format(result), host=self._play_context.remote_addr) + + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_unarchive", + module_args=module_args, + task_vars=task_vars, + ) + ) + return result diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index bf9b28556..21d2b5a7e 100644 --- a/plugins/module_utils/mvs_cmd.py +++ 
b/plugins/module_utils/mvs_cmd.py @@ -153,6 +153,30 @@ def iehlist(cmd, dds=None, authorized=False): return _run_mvs_command("IEHLIST", cmd, dds, authorized) +def amaterse(cmd="", dds=None, authorized=False): + """AMATERSE is a service aid program that operates in problem state. + You can use AMATERSE to pack a data set before transmitting a copy + to another site, typically employing FTP as the transmission mechanism. + A complementary unpack service is provided to create a similar data set + at the receiving site. + Arguments: + dds {dict} -- Any DD statements to pass to MVS command + authorized {bool} -- Whether the command should be run in authorized + mode + """ + return _run_mvs_command("AMATERSE", "", dds, authorized) + + +def adrdssu(cmd, dds=None, authorized=False): + """The ADRDSSU program enables you to copy SMS-compressed data without + having to decompress the data and also provides support for copying + wildcard-named files. + Is a DFSMSdss utility that provides backup and recovery functions + at both the data set and volume levels. + """ + return _run_mvs_command("ADRDSSU", cmd, dds, authorized) + + def _run_mvs_command(pgm, cmd, dd=None, authorized=False): """Run a particular MVS command. diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py new file mode 100644 index 000000000..8b887e1bf --- /dev/null +++ b/plugins/modules/zos_archive.py @@ -0,0 +1,1215 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: zos_archive +version_added: "1.7.0" +author: + - Oscar Fernando Flores Garcia (@fernandofloresg) +short_description: Archive files and data sets on z/OS. + +description: + - Create or extend an archive on a remote z/OS system. + - Sources for archiving must be on the remote z/OS system. + - Supported sources are USS (UNIX System Services) or z/OS data sets. + - The archive remains on the remote z/OS system. + - For supported archive formats, see option C(format). + +options: + src: + description: + - List of names or globs of UNIX System Services (USS) files, + PS (sequential data sets), PDS, PDSE to compress or archive. + - USS file paths should be absolute paths. + - "MVS data sets supported types are: C(SEQ), C(PDS), C(PDSE)." + - VSAMs are not supported. + type: list + required: true + elements: str + format: + description: + - The compression type and corresponding options to use when archiving + data. + type: dict + required: false + suboptions: + name: + description: + - The compression format to use. + type: str + required: false + default: gz + choices: + - bz2 + - gz + - tar + - zip + - terse + - xmit + - pax + format_options: + description: + - Options specific to a compression format. + type: dict + required: false + suboptions: + terse_pack: + description: + - Compression option for use with the terse format, + I(name=terse). + - Pack will compress records in a data set so that the output + results in lossless data compression. + - Spack will compress records in a data set so the output results + in complex data compression. + - Spack will produce smaller output and take approximately 3 + times longer than pack compression. 
+ type: str + required: false + choices: + - PACK + - SPACK + xmit_log_data_set: + description: + - Provide the name of a data set to store xmit log output. + - If the data set provided does not exist, the program + will create it. + - "If the data set provided exists, the data set must have + the following attributes: LRECL=255, BLKSIZE=3120, and + RECFM=VB" + - When providing the I(xmit_log_data_set) name, ensure there + is adequate space. + type: str + use_adrdssu: + description: + - If set to true, the C(zos_archive) module will use Data + Facility Storage Management Subsystem data set services + (DFSMSdss) program ADRDSSU to compress data sets into a + portable format before using C(xmit) or C(terse). + type: bool + default: false + dest: + description: + - The remote absolute path or data set where the archive should be + created. + - I(dest) can be a USS file or MVS data set name. + - If I(dest) has missing parent directories, they will be created. + - If I(dest) is a nonexistent USS file, it will be created. + - Destination data set attributes can be set using I(dest_data_set). + type: str + required: true + exclude: + description: + - Remote absolute path, glob, or list of paths, globs or data set name + patterns for the file, files or data sets to exclude from path list + and glob expansion. + - "Patterns (wildcards) can contain one of the following: ?, *." + - "* matches everything." + - "? matches any single character." + type: list + required: false + elements: str + group: + description: + - Name of the group that will own the archive file. + - When left unspecified, it uses the current group of the current use + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false + mode: + description: + - The permission of the destination archive file. + - If C(dest) is USS, this will act as Unix file mode, otherwise + ignored. 
+ - It should be noted that modes are octal numbers. + The user must either add a leading zero so that Ansible's YAML + parser knows it is an octal number (like C(0644) or C(01777))or + quote it (like C('644') or C('1777')) so Ansible receives a string + and can do its own conversion from string into number. Giving Ansible + a number without following one of these rules will end up with a + decimal number which will have unexpected results. + - The mode may also be specified as a symbolic mode + (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special + string 'preserve'. + - I(mode=preserve) means that the file will be given the same permissions + as the source file. + type: str + required: false + owner: + description: + - Name of the user that should own the archive file, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false + remove: + description: + - Remove any added source files , trees or data sets after module + L(zos_archive,./zos_archive.html) adds them to the archive. + Source files, trees and data sets are identified with option I(path). + type: bool + required: false + default: false + dest_data_set: + description: + - Data set attributes to customize a C(dest) data set to be archived into. + required: false + type: dict + suboptions: + name: + description: + - Desired name for destination dataset. + type: str + required: false + type: + description: + - Organization of the destination + type: str + required: false + default: SEQ + choices: + - SEQ + space_primary: + description: + - If the destination I(dest) data set does not exist , this sets the + primary space allocated for the data set. + - The unit of space used is set using I(space_type). 
+ type: int + required: false + space_secondary: + description: + - If the destination I(dest) data set does not exist , this sets the + secondary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_type: + description: + - If the destination data set does not exist, this sets the unit of + measurement to use when defining primary and secondary space. + - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + type: str + choices: + - K + - M + - G + - CYL + - TRK + required: false + record_format: + description: + - If the destination data set does not exist, this sets the format of + the + data set. (e.g C(FB)) + - Choices are case-insensitive. + required: false + choices: + - FB + - VB + - FBA + - VBA + - U + type: str + record_length: + description: + - The length of each record in the data set, in bytes. + - For variable data sets, the length must include the 4-byte prefix + area. + - "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, + if U 0." + type: int + required: false + block_size: + description: + - The block size to use for the data set. + type: int + required: false + directory_blocks: + description: + - The number of directory blocks to allocate to the data set. + type: int + required: false + sms_storage_class: + description: + - The storage class for an SMS-managed dataset. + - Required for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_data_class: + description: + - The data class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. 
+ type: str + required: false + sms_management_class: + description: + - The management class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary data + sets. + - The default HLQ is the Ansible user used to execute the module and + if that is not available, then the environment variable value + C(TMPHLQ) is used. + required: false + type: str + force: + description: + - If set to C(true) and the remote file or data set C(dest) will be + deleted. Otherwise it will be created with the C(dest_data_set) + attributes or default values if C(dest_data_set) is not specified. + - If set to C(false), the file or data set will only be copied if the + destination does not exist. + - If set to C(false) and destination exists, the module exits with a + note to the user. + type: bool + default: false + required: false + +notes: + - This module does not perform a send or transmit operation to a remote + node. If you want to transport the archive you can use zos_fetch to + retrieve to the controller and then zos_copy or zos_unarchive for + copying to a remote or send to the remote and then unpack the archive + respectively. + - When packing and using C(use_adrdssu) flag the module will take up to two + times the space indicated in C(dest_data_set). 
+
+
+seealso:
+  - module: zos_fetch
+  - module: zos_unarchive
+'''
+
+EXAMPLES = r'''
+# Simple archive
+- name: Archive file into a tar
+  zos_archive:
+    src: /tmp/archive/foo.txt
+    dest: /tmp/archive/foo_archive_test.tar
+    format:
+      name: tar
+
+# Archive multiple files
+- name: Compress list of files into a zip
+  zos_archive:
+    src:
+      - /tmp/archive/foo.txt
+      - /tmp/archive/bar.txt
+    dest: /tmp/archive/foo_bar_archive_test.zip
+    format:
+      name: zip
+
+# Archive one data set into terse
+- name: Compress data set into a terse
+  zos_archive:
+    src: "USER.ARCHIVE.TEST"
+    dest: "USER.ARCHIVE.RESULT.TRS"
+    format:
+      name: terse
+
+# Use terse with different options
+- name: Compress data set into a terse, specify pack algorithm and use adrdssu
+  zos_archive:
+    src: "USER.ARCHIVE.TEST"
+    dest: "USER.ARCHIVE.RESULT.TRS"
+    format:
+      name: terse
+      format_options:
+        terse_pack: "SPACK"
+        use_adrdssu: True
+
+# Use a pattern to store
+- name: Compress data set pattern using xmit
+  zos_archive:
+    src: "USER.ARCHIVE.*"
+    exclude: "USER.ARCHIVE.EXCLUDE.*"
+    dest: "USER.ARCHIVE.RESULT.XMIT"
+    format:
+      name: xmit
+'''
+
+RETURN = r'''
+state:
+    description:
+        - The state of the input C(src).
+        - C(absent) when the source files or data sets were removed.
+        - C(present) when the source files or data sets were not removed.
+        - C(incomplete) when C(remove) was true and the source files or
+          data sets were not removed.
+    type: str
+    returned: always
+dest_state:
+    description:
+        - The state of the I(dest) file or data set.
+        - C(absent) when the file does not exist.
+        - C(archive) when the file is an archive.
+        - C(compress) when the file is compressed, but not an archive.
+        - C(incomplete) when the file is an archive, but some files under
+          I(path) were not found.
+    type: str
+    returned: success
+missing:
+    description: Any files or data sets that were missing from the source.
+    type: list
+    returned: success
+archived:
+    description:
+        - Any files or data sets that were compressed or added to the
+          archive.
+    type: list
+    returned: success
+arcroot:
+    description:
+        - If C(src) is a list of USS files, this returns the top most parent
+          folder of the list of files, otherwise is empty.
+    type: str
+    returned: always
+expanded_sources:
+    description: The list of matching paths from the src option.
+    type: list
+    returned: always
+expanded_exclude_sources:
+    description: The list of matching exclude paths from the exclude option.
+    type: list
+    returned: always
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
+    better_arg_parser,
+    data_set,
+    mvs_cmd)
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
+    MissingZOAUImport,
+)
+import os
+import tarfile
+import zipfile
+import abc
+import glob
+import re
+
+
+try:
+    from zoautil_py import datasets
+except Exception:
+    datasets = MissingZOAUImport()
+
+XMIT_RECORD_LENGTH = 80
+AMATERSE_RECORD_LENGTH = 1024
+
+STATE_ABSENT = 'absent'
+STATE_ARCHIVE = 'archive'
+STATE_COMPRESSED = 'compressed'
+STATE_INCOMPLETE = 'incomplete'
+
+
+def get_archive_handler(module):
+    """
+    Return the proper archive handler based on archive format.
+    Arguments:
+        format: {str}
+    Returns:
+        Archive: {Archive}
+
+    """
+    format = module.params.get("format").get("name")
+    if format in ["tar", "gz", "bz2", "pax"]:
+        return TarArchive(module)
+    elif format == "terse":
+        return AMATerseArchive(module)
+    elif format == "xmit":
+        return XMITArchive(module)
+    return ZipArchive(module)
+
+
+def strip_prefix(prefix, string):
+    return string[len(prefix):] if string.startswith(prefix) else string
+
+
+def expand_paths(paths):
+    expanded_path = []
+    for path in paths:
+        if '*' in path or '?'
in path: + e_paths = glob.glob(path) + else: + e_paths = [path] + expanded_path.extend(e_paths) + return expanded_path + + +def is_archive(path): + return re.search(r'\.(tar|tar\.(gz|bz2|xz)|tgz|tbz2|zip|gz|bz2|xz|pax)$', os.path.basename(path), re.IGNORECASE) + + +class Archive(): + def __init__(self, module): + self.module = module + self.dest = module.params['dest'] + self.format = module.params.get("format").get("name") + self.remove = module.params['remove'] + self.changed = False + self.errors = [] + self.found = [] + self.targets = [] + self.archived = [] + self.not_found = [] + self.force = module.params['force'] + self.sources = module.params['src'] + self.arcroot = "" + self.expanded_sources = "" + self.expanded_exclude_sources = "" + self.dest_state = STATE_ABSENT + + def targets_exist(self): + return bool(self.targets) + + @abc.abstractmethod + def dest_exists(self): + pass + + @abc.abstractmethod + def dest_type(self): + pass + + @abc.abstractmethod + def update_permissions(self): + return + + @abc.abstractmethod + def find_targets(self): + pass + + @abc.abstractmethod + def _get_checksums(self, path): + pass + + @abc.abstractmethod + def dest_checksums(self): + pass + + @abc.abstractmethod + def is_different_from_original(self): + pass + + @abc.abstractmethod + def remove_targets(self): + pass + + @property + def result(self): + return { + 'archived': self.archived, + 'dest': self.dest, + 'arcroot': self.arcroot, + 'dest_state': self.dest_state, + 'changed': self.changed, + 'missing': self.not_found, + 'expanded_sources': list(self.expanded_sources), + 'expanded_exclude_sources': list(self.expanded_exclude_sources), + } + + +class USSArchive(Archive): + def __init__(self, module): + super(USSArchive, self).__init__(module) + self.original_checksums = self.dest_checksums() + if len(self.sources) == 1: + self.arcroot = os.path.dirname(os.path.commonpath(self.sources)) + else: + self.arcroot = os.path.commonpath(self.sources) + self.expanded_sources = 
expand_paths(self.sources) + self.expanded_exclude_sources = expand_paths(module.params['exclude']) + self.expanded_exclude_sources = "" if len(self.expanded_exclude_sources) == 0 else self.expanded_exclude_sources + + self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) + + def dest_exists(self): + return os.path.exists(self.dest) + + def dest_type(self): + return "USS" + + def update_permissions(self): + file_args = self.module.load_file_common_arguments(self.module.params, path=self.dest) + self.changed = self.module.set_fs_attributes_if_different(file_args, self.changed) + + def find_targets(self): + for path in self.sources: + if os.path.exists(path): + self.targets.append(path) + else: + self.not_found.append(path) + + def _get_checksums(self, path): + md5_cmd = "md5 -r \"{0}\"".format(path) + rc, out, err = self.module.run_command(md5_cmd) + checksums = out.split(" ")[0] + return checksums + + def dest_checksums(self): + if self.dest_exists(): + return self._get_checksums(self.dest) + return None + + def is_different_from_original(self): + if self.original_checksums is not None: + return self.original_checksums != self.dest_checksums() + return True + + def remove_targets(self): + for target in self.archived: + if os.path.isdir(target): + os.removedirs(target) + else: + os.remove(target) + + def archive_targets(self): + self.file = self.open(self.dest) + + try: + for target in self.targets: + if os.path.isdir(target): + for directory_path, directory_names, file_names in os.walk(target, topdown=True): + for directory_name in directory_names: + full_path = os.path.join(directory_path, directory_name) + self.add(full_path, strip_prefix(self.arcroot, full_path)) + + for file_name in file_names: + full_path = os.path.join(directory_path, file_name) + self.add(full_path, strip_prefix(self.arcroot, full_path)) + else: + self.add(target, strip_prefix(self.arcroot, target)) + except Exception as e: + self.dest_state = 
STATE_INCOMPLETE
+            if self.format == 'tar':
+                archive_format = self.format
+            else:
+                archive_format = 'tar.' + self.format
+            self.module.fail_json(
+                msg='Error when writing %s archive at %s: %s' % (
+                    archive_format, self.dest, e
+                ),
+                exception=e
+            )
+        self.file.close()
+
+    def add(self, source, arcname):
+        self._add(source, arcname)
+        self.archived.append(source)
+
+    def get_state(self):
+        if not self.dest_exists():
+            self.dest_state = STATE_ABSENT
+        else:
+            if is_archive(self.dest):
+                self.dest_state = STATE_ARCHIVE
+            if bool(self.not_found):
+                self.dest_state = STATE_INCOMPLETE
+
+
+class TarArchive(USSArchive):
+    def __init__(self, module):
+        super(TarArchive, self).__init__(module)
+
+    def open(self, path):
+        if self.format == 'tar':
+            file = tarfile.open(path, 'w')
+        elif self.format == 'pax':
+            file = tarfile.open(path, 'w', format=tarfile.GNU_FORMAT)
+        elif self.format in ('gz', 'bz2'):
+            file = tarfile.open(path, 'w|' + self.format)
+        return file
+
+    def _add(self, source, arcname):
+        self.file.add(source, arcname)
+
+
+class ZipArchive(USSArchive):
+    def __init__(self, module):
+        super(ZipArchive, self).__init__(module)
+
+    def open(self, path):
+        try:
+            file = zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED, True)
+        except zipfile.BadZipFile:
+            self.module.fail_json(
+                msg="Improperly compressed zip file, unable to open file {0} ".format(path)
+            )
+        return file
+
+    def _add(self, source, arcname):
+        self.file.write(source, arcname)
+
+
+class MVSArchive(Archive):
+    def __init__(self, module):
+        super(MVSArchive, self).__init__(module)
+        self.original_checksums = self.dest_checksums()
+        self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu")
+        self.expanded_sources = self.expand_mvs_paths(self.sources)
+        self.expanded_exclude_sources = self.expand_mvs_paths(module.params['exclude'])
+        self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources))
+        self.tmp_data_sets = list()
+
self.dest_data_set = module.params.get("dest_data_set") + self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set + self.tmphlq = module.params.get("tmp_hlq") + + def open(self): + pass + + def close(self): + pass + + def find_targets(self): + """ + Finds target datasets in host. + """ + for path in self.sources: + if data_set.DataSet.data_set_exists(path): + self.targets.append(path) + else: + self.not_found.append(path) + + def _compute_dest_data_set_size(self): + """ + Computes the attributes that the destination data set or temporary destination + data set should have in terms of size, record_length, etc. + """ + + """ + - Size of temporary DS for archive handling. + + If remote_src then we can get the source_size from archive on the system. + + If not remote_src then we can get the source_size from temporary_ds. + Both are named src so no problemo. + + If format is xmit, dest_data_set size is the same as source_size. + + If format is terse, dest_data_set size is different than the source_size, has to be greater, + but how much? In this case we can add dest_data_set option. + + Apparently the only problem is when format name is terse. + """ + + # Get the size from the system + default_size = 5 + dest_space_type = 'M' + dest_primary_space = int(default_size) + return dest_primary_space, dest_space_type + + def _create_dest_data_set( + self, + name=None, + replace=None, + type=None, + space_primary=None, + space_secondary=None, + space_type=None, + record_format=None, + record_length=None, + block_size=None, + directory_blocks=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + volumes=None, + tmp_hlq=None, + force=None, + ): + """Create a temporary data set. + + Arguments: + tmp_hlq(str): A HLQ specified by the user for temporary data sets. + + Returns: + str: Name of the temporary data set created. 
+ """ + arguments = locals() + if name is None: + if tmp_hlq: + hlq = tmp_hlq + else: + rc, hlq, err = self.module.run_command("hlq") + hlq = hlq.replace('\n', '') + cmd = "mvstmphelper {0}.DZIP".format(hlq) + rc, temp_ds, err = self.module.run_command(cmd) + arguments.update(name=temp_ds.replace('\n', '')) + + if record_format is None: + arguments.update(record_format="FB") + if record_length is None: + arguments.update(record_length=80) + if type is None: + arguments.update(type="SEQ") + if space_primary is None: + arguments.update(space_primary=5) + if space_secondary is None: + arguments.update(space_secondary=3) + if space_type is None: + arguments.update(space_type="M") + arguments.pop("self") + changed = data_set.DataSet.ensure_present(**arguments) + return arguments["name"], changed + + def create_dest_ds(self, name): + """ + Create destination data set to use as an archive. + Arguments: + name: {str} + Returns: + name {str} - name of the newly created data set. + """ + record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH + changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) + # rc, out, err = self.module.run_command(cmd) + + # if not changed: + # self.module.fail_json( + # msg="Failed preparing {0} to be used as an archive".format(name), + # stdout=out, + # stderr=err, + # stdout_lines=cmd, + # rc=rc, + # ) + return name + + def dump_into_temp_ds(self, temp_ds): + """ + Dump src datasets identified as self.targets into a temporary dataset using ADRDSSU. 
+ """ + dump_cmd = """ DUMP OUTDDNAME(TARGET) - + OPTIMIZE(4) DS(INCL( - """ + + for target in self.targets: + dump_cmd += "\n {0}, - ".format(target) + dump_cmd += '\n ) ' + + if self.force: + dump_cmd += '- \n ) TOL( ENQF IOER ) ' + + dump_cmd += ' )' + dds = dict(target="{0},old".format(temp_ds)) + rc, out, err = mvs_cmd.adrdssu(cmd=dump_cmd, dds=dds, authorized=True) + + if rc != 0: + self.module.fail_json( + msg="Failed executing ADRDSSU to archive {0}".format(temp_ds), + stdout=out, + stderr=err, + stdout_lines=dump_cmd, + rc=rc, + ) + return rc + + def _get_checksums(self, path): + md5_cmd = "md5 -r \"//'{0}'\"".format(path) + rc, out, err = self.module.run_command(md5_cmd) + checksums = out.split(" ")[0] + return checksums + + def dest_checksums(self): + if self.dest_exists(): + return self._get_checksums(self.dest) + return None + + def is_different_from_original(self): + if self.original_checksums is not None: + return self.original_checksums != self.dest_checksums() + return True + + def dest_type(self): + return "MVS" + + def dest_exists(self): + return data_set.DataSet.data_set_exists(self.dest) + + def remove_targets(self): + for target in self.archived: + data_set.DataSet.ensure_absent(target) + return + + def expand_mvs_paths(self, paths): + expanded_path = [] + for path in paths: + if '*' in path: + e_paths = datasets.listing(path) + e_paths = [path.name for path in e_paths] + else: + e_paths = [path] + expanded_path.extend(e_paths) + return expanded_path + + def get_state(self): + if not self.dest_exists(): + self.dest_state = STATE_ABSENT + else: + if bool(self.not_found): + self.dest_state = STATE_INCOMPLETE + elif bool(self.archived): + self.dest_state = STATE_ARCHIVE + + def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False): + """Removes any allocated data sets that won't be needed after module termination. 
+        Arguments:
+            data_sets - {list(str)} : list of data sets to remove
+            uss_files - {list(str)} : list of uss files to remove
+            remove_targets - bool : Indicates if already unpacked data sets need to be removed too.
+        """
+        if data_sets is not None:
+            for ds in data_sets:
+                data_set.DataSet.ensure_absent(ds)
+        if uss_files is not None:
+            for file in uss_files:
+                os.remove(file)
+        if remove_targets:
+            for target in self.targets:
+                data_set.DataSet.ensure_absent(target)
+
+
+class AMATerseArchive(MVSArchive):
+    def __init__(self, module):
+        super(AMATerseArchive, self).__init__(module)
+        self.pack_arg = module.params.get("format").get("format_options").get("terse_pack")
+        if self.pack_arg is None:
+            self.pack_arg = "SPACK"
+
+    def add(self, src, archive):
+        """
+        Archive src into archive using AMATERSE program.
+        Arguments:
+            src: {str}
+            archive: {str}
+        """
+        dds = {'args': self.pack_arg, 'sysut1': src, 'sysut2': archive}
+        rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds)
+        if rc != 0:
+            self.module.fail_json(
+                msg="Failed executing AMATERSE to archive {0} into {1}".format(src, archive),
+                stdout=out,
+                stderr=err,
+                rc=rc,
+            )
+        self.archived = self.targets[:]
+        return rc
+
+    def archive_targets(self):
+        """
+        Add MVS Datasets to the AMATERSE Archive by creating a temporary dataset and dumping the source datasets into it.
+ """ + if self.use_adrdssu: + source, changed = self._create_dest_data_set( + type="SEQ", + record_format="U", + record_length=0, + tmp_hlq=self.tmphlq, + replace=True, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.dump_into_temp_ds(source) + self.tmp_data_sets.append(source) + else: + # If we don't use a adrdssu container we cannot pack multiple data sets + if len(self.targets) > 1: + self.module.fail_json( + msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + source = self.targets[0] + # dest = self.create_dest_ds(self.dest) + dest, changed = self._create_dest_data_set( + name=self.dest, + replace=True, + type='SEQ', + record_format='FB', + record_length=AMATERSE_RECORD_LENGTH, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.changed = self.changed or changed + self.add(source, dest) + self.clean_environment(data_sets=self.tmp_data_sets) + + +class XMITArchive(MVSArchive): + def __init__(self, module): + super(XMITArchive, self).__init__(module) + self.xmit_log_data_set = module.params.get("format").get("format_options").get("xmit_log_data_set") + + def add(self, src, archive): + """ + Archive src into archive using TSO XMIT. 
+ Arguments: + src: {str} + archive: {str} + """ + log_option = "LOGDSNAME({0})".format(self.xmit_log_data_set) if self.xmit_log_data_set else "NOLOG" + xmit_cmd = """ XMIT A.B - + FILE(SYSUT1) OUTFILE(SYSUT2) - + {0} - + """.format(log_option) + dds = {"SYSUT1": "{0},shr".format(src), "SYSUT2": archive} + rc, out, err = mvs_cmd.ikjeft01(cmd=xmit_cmd, authorized=True, dds=dds) + if rc != 0: + self.module.fail_json( + msg="An error occurred while executing 'TSO XMIT' to archive {0} into {1}".format(src, archive), + stdout=out, + stderr=err, + rc=rc, + ) + self.archived = self.targets[:] + return rc + + def archive_targets(self): + """ + Adds MVS Datasets to the TSO XMIT Archive by creating a temporary dataset and dumping the source datasets into it. + """ + if self.use_adrdssu: + source, changed = self._create_dest_data_set( + type="SEQ", + record_format="U", + record_length=0, + tmp_hlq=self.tmphlq, + replace=True, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.dump_into_temp_ds(source) + self.tmp_data_sets.append(source) + else: + # If we don't use a adrdssu container we cannot pack multiple data sets + if len(self.sources) > 1: + self.module.fail_json( + msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + source = self.sources[0] + # dest = self.create_dest_ds(self.dest) + dest, changed = self._create_dest_data_set( + name=self.dest, + replace=True, + type='SEQ', + record_format='FB', + record_length=XMIT_RECORD_LENGTH, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.changed = self.changed or changed + self.add(source, dest) + self.clean_environment(data_sets=self.tmp_data_sets) + + +def run_module(): + module = AnsibleModule( + argument_spec=dict( + src=dict(type='list', elements='str', required=True), + dest=dict(type='str', required=True), + exclude=dict(type='list', elements='str'), + 
format=dict( + type='dict', + options=dict( + name=dict( + type='str', + default='gz', + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + terse_pack=dict( + type='str', + choices=['PACK', 'SPACK'], + ), + xmit_log_data_set=dict( + type='str', + ), + use_adrdssu=dict( + type='bool', + default=False, + ) + ), + ), + ) + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + remove=dict(type='bool', default=False), + dest_data_set=dict( + type='dict', + required=False, + options=dict( + name=dict( + type='str', required=False, + ), + type=dict( + type='str', + choices=['SEQ'], + required=False, + default="SEQ", + ), + space_primary=dict( + type='int', required=False), + space_secondary=dict( + type='int', required=False), + space_type=dict( + type='str', + choices=['K', 'M', 'G', 'CYL', 'TRK'], + required=False, + ), + record_format=dict( + type='str', + choices=["FB", "VB", "FBA", "VBA", "U"], + required=False + ), + record_length=dict(type='int', required=False), + block_size=dict(type='int', required=False), + directory_blocks=dict(type="int", required=False), + sms_storage_class=dict(type="str", required=False), + sms_data_class=dict(type="str", required=False), + sms_management_class=dict(type="str", required=False), + ) + ), + tmp_hlq=dict(type='str'), + force=dict(type='bool', default=False) + ), + supports_check_mode=True, + ) + + arg_defs = dict( + src=dict(type='list', elements='str', required=True), + dest=dict(type='str', required=True), + exclude=dict(type='list', elements='str', default=[]), + format=dict( + type='dict', + options=dict( + name=dict( + type='str', + default='gz', + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + terse_pack=dict( + type='str', + required=False, + choices=['PACK', 'SPACK'], + ), + xmit_log_data_set=dict( + type='str', + 
required=False, + ), + use_adrdssu=dict( + type='bool', + default=False, + ) + ), + default=dict( + terse_pack="SPACK", + xmit_log_data_set="", + use_adrdssu=False), + ), + ), + default=dict( + name="", + format_options=dict( + terse_pack="SPACK", + xmit_log_data_set="", + use_adrdssu=False + ) + ), + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + remove=dict(type='bool', default=False), + dest_data_set=dict( + arg_type='dict', + required=False, + options=dict( + name=dict(arg_type='str', required=False), + type=dict(arg_type='str', required=False, default="SEQ"), + space_primary=dict(arg_type='int', required=False), + space_secondary=dict( + arg_type='int', required=False), + space_type=dict(arg_type='str', required=False), + record_format=dict( + arg_type='str', required=False), + record_length=dict(type='int', required=False), + block_size=dict(arg_type='int', required=False), + directory_blocks=dict(arg_type="int", required=False), + sms_storage_class=dict(arg_type="str", required=False), + sms_data_class=dict(arg_type="str", required=False), + sms_management_class=dict(arg_type="str", required=False), + ) + ), + tmp_hlq=dict(type='qualifier_or_empty', default=''), + force=dict(type='bool', default=False) + ) + + result = dict( + changed=False, + original_message='', + message='' + ) + if module.check_mode: + module.exit_json(**result) + + try: + parser = better_arg_parser.BetterArgParser(arg_defs) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json(msg="Parameter verification failed", stderr=str(err)) + + archive = get_archive_handler(module) + + if archive.dest_exists() and not archive.force: + module.fail_json(msg="%s file exists. 
Use force flag to replace dest" % archive.dest) + + archive.find_targets() + if archive.targets_exist(): + archive.archive_targets() + if archive.remove: + archive.remove_targets() + if archive.dest_exists(): + if archive.dest_type() == "USS": + archive.update_permissions() + archive.changed = archive.is_different_from_original() + archive.get_state() + + module.exit_json(**archive.result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py new file mode 100644 index 000000000..3f79fc789 --- /dev/null +++ b/plugins/modules/zos_unarchive.py @@ -0,0 +1,1156 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: zos_unarchive +version_added: "1.7.0" +author: + - Oscar Fernando Flores Garcia (@fernandofloresg) +short_description: Unarchive files and data sets in z/OS. +description: + - The C(zos_unarchive) module unpacks an archive after optionally + transferring it to the remote system. + - For supported archive formats, see option C(format). + - Supported sources are USS (UNIX System Services) or z/OS data sets. + - Mixing MVS data sets with USS files for unarchiving is not supported. + - The archive is sent to the remote as binary, so no encoding is performed. 
+ + +options: + src: + description: + - The remote absolute path or data set of the archive to be uncompressed. + - I(src) can be a USS file or MVS data set name. + - USS file paths should be absolute paths. + - MVS data sets supported types are C(SEQ), C(PDS), C(PDSE). + type: str + required: true + format: + description: + - The compression type and corresponding options to use when archiving + data. + type: dict + required: true + suboptions: + name: + description: + - The compression format to use. + type: str + required: true + choices: + - bz2 + - gz + - tar + - zip + - terse + - xmit + - pax + format_options: + description: + - Options specific to a compression format. + type: dict + required: false + suboptions: + xmit_log_data_set: + description: + - Provide the name of a data set to store xmit log output. + - If the data set provided does not exist, the program + will create it. + - 'If the data set provided exists, the data set must have + the following attributes: LRECL=255, BLKSIZE=3120, and + RECFM=VB' + - When providing the I(xmit_log_data_set) name, ensure there + is adequate space. + type: str + use_adrdssu: + description: + - If set to true, the C(zos_archive) module will use Data + Facility Storage Management Subsystem data set services + (DFSMSdss) program ADRDSSU to uncompress data sets from + a portable format after using C(xmit) or C(terse). + type: bool + default: False + dest_volumes: + description: + - When I(use_adrdssu=True), specify the volume the data sets + will be written to. + - If no volume is specified, storage management rules will be + used to determine the volume where the file will be + unarchived. + - If the storage administrator has specified a system default + unit name and you do not set a volume name for + non-system-managed data sets, then the system uses the + volumes associated with the default unit name. Check with + your storage administrator to determine whether a default + unit name has been specified. 
+ type: list + elements: str + dest: + description: + - The remote absolute path or data set where the content should be unarchived to. + - I(dest) can be a USS file, directory or MVS data set name. + - If dest has missing parent directories, they will not be created. + type: str + required: false + group: + description: + - Name of the group that will own the file system objects. + - When left unspecified, it uses the current group of the current user + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false + mode: + description: + - The permission of the uncompressed files. + - If C(dest) is USS, this will act as Unix file mode, otherwise ignored. + - It should be noted that modes are octal numbers. + The user must either add a leading zero so that Ansible's YAML parser + knows it is an octal number (like C(0644) or C(01777))or quote it + (like C('644') or C('1777')) so Ansible receives a string and can do + its own conversion from string into number. Giving Ansible a number + without following one of these rules will end up with a decimal number + which will have unexpected results. + - The mode may also be specified as a symbolic mode + (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special + string `preserve`. + - I(mode=preserve) means that the file will be given the same permissions + as + the source file. + type: str + required: false + owner: + description: + - Name of the user that should own the filesystem object, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + type: str + required: false + include: + description: + - A list of directories, files or data set names to extract from the + archive. + - When C(include) is set, only those files will we be extracted leaving + the remaining files in the archive. 
+ - Mutually exclusive with exclude. + type: list + elements: str + required: false + exclude: + description: + - List the directory and file or data set names that you would like to + exclude from the unarchive action. + - Mutually exclusive with include. + type: list + elements: str + required: false + list: + description: + - Will list the contents of the archive without unpacking. + type: bool + required: false + default: false + dest_data_set: + description: + - Data set attributes to customize a C(dest) data set that the archive will be copied into. + required: false + type: dict + suboptions: + name: + description: + - Desired name for destination dataset. + type: str + required: false + type: + description: + - Organization of the destination + type: str + required: false + default: SEQ + choices: + - SEQ + - PDS + - PDSE + space_primary: + description: + - If the destination I(dest) data set does not exist , this sets the + primary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_secondary: + description: + - If the destination I(dest) data set does not exist , this sets the + secondary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_type: + description: + - If the destination data set does not exist, this sets the unit of + measurement to use when defining primary and secondary space. + - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + type: str + choices: + - K + - M + - G + - CYL + - TRK + required: false + record_format: + description: + - If the destination data set does not exist, this sets the format of + the + data set. (e.g C(FB)) + - Choices are case-insensitive. + required: false + choices: + - FB + - VB + - FBA + - VBA + - U + type: str + record_length: + description: + - The length of each record in the data set, in bytes. 
+ - For variable data sets, the length must include the 4-byte prefix + area. + - "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, + if U 0." + type: int + required: false + block_size: + description: + - The block size to use for the data set. + type: int + required: false + directory_blocks: + description: + - The number of directory blocks to allocate to the data set. + type: int + required: false + key_offset: + description: + - The key offset to use when creating a KSDS data set. + - I(key_offset) is required when I(type=KSDS). + - I(key_offset) should only be provided when I(type=KSDS) + type: int + required: false + key_length: + description: + - The key length to use when creating a KSDS data set. + - I(key_length) is required when I(type=KSDS). + - I(key_length) should only be provided when I(type=KSDS) + type: int + required: false + sms_storage_class: + description: + - The storage class for an SMS-managed dataset. + - Required for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_data_class: + description: + - The data class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_management_class: + description: + - The management class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary data + sets. 
+ - The default HLQ is the Ansible user used to execute the module and if + that is not available, then the environment variable value C(TMPHLQ) is + used. + type: str + required: false + force: + description: + - If set to true and the remote file or data set dest exists, the dest + will be deleted. + type: bool + required: false + default: false + remote_src: + description: + - If set to true, C(zos_unarchive) retrieves the archive from the remote + system. + - If set to false, C(zos_unarchive) searches the local machine (Ansible + controller) for the archive. + type: bool + required: false + default: false + +notes: + - VSAMs are not supported. + +seealso: + - module: zos_unarchive +''' + +EXAMPLES = r''' +# Simple extract +- name: Copy local tar file and unpack it on the managed z/OS node. + zos_unarchive: + path: "./files/archive_folder_test.tar" + format: + name: tar + +# use include +- name: Unarchive a bzip file selecting only a file to unpack. + zos_unarchive: + path: "/tmp/test.bz2" + format: + name: bz2 + include: + - 'foo.txt' + +# Use exclude +- name: Unarchive a terse data set and excluding data sets from unpacking. + zos_unarchive: + path: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + exclude: + - USER.ARCHIVE.TEST1 + - USER.ARCHIVE.TEST2 + +# List option +- name: List content from XMIT + zos_unarchive: + path: "USER.ARCHIVE.RESULT.XMIT" + format: + name: xmit + format_options: + use_adrdssu: True + list: True +''' + +RETURN = r''' +path: + description: + File path or data set name unarchived. + type: str + returned: always +dest_path: + description: + - Destination path where archive was extracted. + type: str + returned: always +targets: + description: + List of files or data sets in the archive. + type: list + elements: str + returned: success +missing: + description: + Any files or data sets not found during extraction. 
+ type: str + returned: success +''' + +import abc +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser, + data_set, + mvs_cmd) +import re +import os +import zipfile +import tarfile +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + MissingZOAUImport, +) + +try: + from zoautil_py import datasets +except Exception: + Datasets = MissingZOAUImport() + +data_set_regex = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}" + +XMIT_RECORD_LENGTH = 80 +AMATERSE_RECORD_LENGTH = 1024 + + +class Unarchive(): + def __init__(self, module): + self.module = module + self.src = module.params.get("src") + self.dest = module.params.get("dest") + self.format = module.params.get("format").get("name") + self.format_options = module.params.get("format").get("format_options") + self.tmphlq = module.params.get("tmp_hlq") + self.force = module.params.get("force") + self.targets = list() + self.include = module.params.get("include") + self.exclude = module.params.get("exclude") + self.list = module.params.get("list") + self.changed = False + self.missing = list() + self.remote_src = module.params.get("remote_src") + if self.dest == '': + self.dest = os.path.dirname(self.src) + + @abc.abstractmethod + def extract_src(self): + pass + + @abc.abstractmethod + def _list_content(self): + pass + + def src_exists(self): + return self.src and os.path.exists(self.src) + + def dest_type(self): + return "USS" + + def dest_unarchived(self): + return bool(self.targets) + + def update_permissions(self): + """ + Update permissions in unarchived files. 
+ """ + for target in self.targets: + file_name = os.path.join(self.dest, target) + file_args = self.module.load_file_common_arguments(self.module.params, path=file_name) + self.module.set_fs_attributes_if_different(file_args, self.changed) + + @property + def result(self): + return { + 'src': self.src, + 'dest_path': self.dest, + 'changed': self.changed, + 'targets': self.targets, + 'missing': self.missing, + } + + +class TarUnarchive(Unarchive): + def __init__(self, module): + super(TarUnarchive, self).__init__(module) + + def open(self, path): + """Open an archive using tarfile lib for read. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to be opened. + + Returns: + Return a TarFile object for the path name. + """ + if self.format == 'tar': + file = tarfile.open(path, 'r') + elif self.format in ('pax'): + file = tarfile.open(path, 'r', format=tarfile.GNU_FORMAT) + elif self.format in ('gz', 'bz2'): + file = tarfile.open(path, 'r:' + self.format) + else: + self.module.fail_json(msg="%s is not a valid archive format for listing contents" % self.format) + return file + + def list_archive_content(self, path): + self.targets = self._list_content(self.src) + + def _list_content(self, path): + """Returns a list of members in an archive. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to list its contents. + + Returns: + list(str): List of members inside the archive. + """ + self.file = self.open(path) + members = self.file.getnames() + self.file.close() + return members + + def extract_src(self): + """Unpacks the contents of the archive stored in path into dest folder. + + """ + original_working_dir = os.getcwd() + # The function gets relative paths, so it changes the current working + # directory to the root of src. 
+ os.chdir(self.dest) + self.file = self.open(self.src) + + files_in_archive = self.file.getnames() + if self.include: + for path in self.include: + if path not in files_in_archive: + self.missing.append(path) + else: + self.file.extract(path) + self.targets.append(path) + elif self.exclude: + for path in files_in_archive: + if path not in self.exclude: + self.file.extract(path) + self.targets.append(path) + else: + self.file.extractall(members=sanitize_members(self.file.getmembers(), self.dest, self.format)) + self.targets = files_in_archive + self.file.close() + # Returning the current working directory to what it was before to not + # interfere with the rest of the module. + os.chdir(original_working_dir) + self.changed = bool(self.targets) + + +class ZipUnarchive(Unarchive): + def __init__(self, module): + super(ZipUnarchive, self).__init__(module) + + def open(self, path): + """Unpacks the contents of the archive stored in path into dest folder. + + """ + try: + file = zipfile.ZipFile(path, 'r', zipfile.ZIP_DEFLATED, True) + except zipfile.BadZipFile: + self.module.fail_json( + msg="Improperly compressed zip file, unable to to open file {0} ".format(path) + ) + return file + + def list_archive_content(self): + self.targets = self._list_content(self.src) + + def _list_content(self, path): + """Returns a list of members in an archive. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to list its contents. + + Returns: + list(str): List of members inside the archive. + """ + self.file = self.open(path) + members = self.file.namelist() + self.file.close() + return members + + def extract_src(self): + """Returns a list of members in an archive. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to list its contents. + + Returns: + list(str): List of members inside the archive. + """ + original_working_dir = os.getcwd() + # The function gets relative paths, so it changes the current working + # directory to the root of src. 
+ os.chdir(self.dest) + self.file = self.open(self.src) + + files_in_archive = self.file.namelist() + if self.include: + for path in self.include: + if path not in files_in_archive: + self.missing.append(path) + else: + self.file.extract(path) + self.targets.append(path) + elif self.exclude: + for path in files_in_archive: + if path not in self.exclude: + self.file.extract(path) + self.targets.append(path) + else: + self.file.extractall(members=sanitize_members(self.file.infolist(), self.dest, self.format)) + self.targets = files_in_archive + self.file.close() + # Returning the current working directory to what it was before to not + # interfere with the rest of the module. + os.chdir(original_working_dir) + self.changed = bool(self.targets) + + +class MVSUnarchive(Unarchive): + def __init__(self, module): + super(MVSUnarchive, self).__init__(module) + self.volumes = self.format_options.get("dest_volumes") + self.use_adrdssu = self.format_options.get("use_adrdssu") + self.dest_data_set = module.params.get("dest_data_set") + self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set + self.source_size = 0 + + def dest_type(self): + return "MVS" + + def _compute_dest_data_set_size(self): + """ + Computes the attributes that the destination data set or temporary destination + data set should have in terms of size, record_length, etc. + """ + + """ + - Size of temporary DS for archive handling. + + If remote_src then we can get the source_size from archive on the system. + + If not remote_src then we can get the source_size from temporary_ds. + Both are named src so no problemo. + + If format is xmit, dest_data_set size is the same as source_size. + + If format is terse, dest_data_set size is different than the source_size, has to be greater, + but how much? In this case we can add dest_data_set option. + + Apparently the only problem is when format name is terse. 
+ """ + + # Get the size from the system + src_attributes = datasets.listing(self.src)[0] + # The size returned by listing is in bytes. + source_size = int(src_attributes.total_space) + if self.format == 'terse': + source_size = int(source_size * 1.5) + return source_size + + def _create_dest_data_set( + self, + name=None, + replace=None, + type=None, + space_primary=None, + space_secondary=None, + space_type=None, + record_format=None, + record_length=None, + block_size=None, + directory_blocks=None, + key_length=None, + key_offset=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + volumes=None, + tmp_hlq=None, + force=None, + ): + """Create a temporary data set. + + Arguments: + tmp_hlq(str): A HLQ specified by the user for temporary data sets. + + Returns: + str: Name of the temporary data set created. + """ + arguments = locals() + if name is None: + if tmp_hlq: + hlq = tmp_hlq + else: + rc, hlq, err = self.module.run_command("hlq") + hlq = hlq.replace('\n', '') + cmd = "mvstmphelper {0}.RESTORE".format(hlq) + rc, temp_ds, err = self.module.run_command(cmd) + arguments.update(name=temp_ds.replace('\n', '')) + if record_format is None: + arguments.update(record_format="FB") + if record_length is None: + arguments.update(record_length=80) + if type is None: + arguments.update(type="SEQ") + if space_primary is None: + arguments.update(space_primary=self._compute_dest_data_set_size()) + arguments.pop("self") + changed = data_set.DataSet.ensure_present(**arguments) + return arguments["name"], changed + + def _get_include_data_sets_cmd(self): + include_cmd = "INCL( " + for include_ds in self.include: + include_cmd += " '{0}', - \n".format(include_ds) + include_cmd += " ) - \n" + return include_cmd + + def _get_exclude_data_sets_cmd(self): + exclude_cmd = "EXCL( - \n" + for exclude_ds in self.exclude: + exclude_cmd += " '{0}', - \n".format(exclude_ds) + exclude_cmd += " ) - \n" + return exclude_cmd + + def _get_volumes(self): + 
volumes_cmd = "OUTDYNAM( - \n" + for volume in self.volumes: + volumes_cmd += " ('{0}'), - \n".format(volume) + volumes_cmd += " ) - \n" + return volumes_cmd + + def _restore(self, source): + """ + Calls ADDRSU using RESTORE to unpack the dump datasets. + + Arguments: + source(str): Name of the data set to use as archive in ADRDSSU restore operation. + + Returns: + int: Return code result of restore operation. + """ + filter = "INCL(**) " + volumes = "" + force = "REPLACE -\n TOLERATE(ENQFAILURE) " if self.force else "" + if self.include: + filter = self._get_include_data_sets_cmd() + if self.exclude: + filter = self._get_exclude_data_sets_cmd() + if self.volumes: + volumes = self._get_volumes() + restore_cmd = """ RESTORE INDD(ARCHIVE) - + DS( - + {0} ) - + {1} - + CATALOG - + {2} """.format(filter, volumes, force) + dds = dict(archive="{0},old".format(source)) + rc, out, err = mvs_cmd.adrdssu(cmd=restore_cmd, dds=dds, authorized=True) + self._get_restored_datasets(out) + + if rc != 0: + # AdrddssuRestoreError + unrestored_data_sets = self._get_unrestored_datasets(out) + unrestored_data_sets = ", ".join(unrestored_data_sets) + self.clean_environment(data_sets=[source], uss_files=[], remove_targets=True) + self.module.fail_json( + msg="Failed executing ADRDSSU to unarchive {0}. 
List of data sets not restored : {1}".format(source, unrestored_data_sets), + stdout=out, + stderr=err, + stdout_lines=restore_cmd, + rc=rc, + ) + return rc + + def src_exists(self): + return data_set.DataSet.data_set_exists(self.src) + + def _get_restored_datasets(self, output): + ds_list = list() + find_ds_list = re.findall(r"SUCCESSFULLY PROCESSED\n(?:.*\n)*", output) + if find_ds_list: + ds_list = re.findall(data_set_regex, find_ds_list[0]) + self.targets = ds_list + return ds_list + + def _get_unrestored_datasets(self, output): + ds_list = list() + output = output.split("SUCCESSFULLY PROCESSED")[0] + find_ds_list = re.findall(r"NOT PROCESSED FROM THE LOGICALLY FORMATTED DUMP TAPE DUE TO \n(?:.*\n)*", output) + if find_ds_list: + ds_list = re.findall(data_set_regex, find_ds_list[0]) + return ds_list + + @abc.abstractmethod + def unpack(self): + pass + + def extract_src(self): + """Extract the MVS path contents. + + """ + temp_ds = "" + if not self.use_adrdssu: + temp_ds, rc = self._create_dest_data_set(**self.dest_data_set) + rc = self.unpack(self.src, temp_ds) + else: + temp_ds, rc = self._create_dest_data_set(type="SEQ", + record_format="U", + record_length=0, + tmp_hlq=self.tmphlq, + replace=True) + self.unpack(self.src, temp_ds) + rc = self._restore(temp_ds) + datasets.delete(temp_ds) + self.changed = not rc + + if not self.remote_src: + datasets.delete(self.src) + return + + def _list_content(self, source): + restore_cmd = " RESTORE INDD(ARCHIVE) DS(INCL(**)) " + cmd = " mvscmdauth --pgm=ADRDSSU --archive={0},old --args='TYPRUN=NORUN' --sysin=stdin --sysprint=*".format(source) + rc, out, err = self.module.run_command(cmd, data=restore_cmd) + self._get_restored_datasets(out) + + def list_archive_content(self): + temp_ds, rc = self._create_dest_data_set(type="SEQ", record_format="U", record_length=0, tmp_hlq=self.tmphlq, replace=True) + self.unpack(self.src, temp_ds) + self._list_content(temp_ds) + datasets.delete(temp_ds) + if not self.remote_src: + 
datasets.delete(self.src) + + def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False): + """Removes any allocated data sets that won't be needed after module termination. + Arguments: + data_sets - {list(str)} : list of data sets to remove + uss_files - {list(str)} : list of uss files to remove + remove_targets - bool : Indicates if already unpacked data sets need to be removed too. + """ + if data_set is not None: + for ds in data_sets: + data_set.DataSet.ensure_absent(ds) + if uss_files is not None: + for file in uss_files: + os.remove(file) + if remove_targets: + for target in self.targets: + data_set.DataSet.ensure_absent(target) + + +class AMATerseUnarchive(MVSUnarchive): + def __init__(self, module): + super(AMATerseUnarchive, self).__init__(module) + + def unpack(self, src, dest): + """ + Unpacks using AMATerse, assumes the data set has only been packed once. + """ + dds = {'args': 'UNPACK', 'sysut1': src, 'sysut2': dest} + rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds) + if rc != 0: + self.clean_environment(data_sets=[dest], uss_files=[], remove_targets=True) + self.module.fail_json( + msg="Failed executing AMATERSE to restore {0} into {1}".format(src, dest), + stdout=out, + stderr=err, + rc=rc, + ) + return rc + + +class XMITUnarchive(MVSUnarchive): + def __init__(self, module): + super(XMITUnarchive, self).__init__(module) + + def unpack(self, src, dest): + """ + Unpacks using XMIT. 
+ + src is the archive + dest is the destination dataset + """ + unpack_cmd = """ + PROFILE NOPROMPT + RECEIVE INDSN('{0}') + DA('{1}') + """.format(src, dest) + rc, out, err = mvs_cmd.ikjeft01(cmd=unpack_cmd, authorized=True) + if rc != 0: + self.module.fail_json( + msg="Failed executing RECEIVE to restore {0} into {1}".format(src, dest), + stdout=out, + stderr=err, + rc=rc, + ) + return rc + + +def get_unarchive_handler(module): + format = module.params.get("format").get("name") + if format in ["tar", "gz", "bz2", "pax"]: + return TarUnarchive(module) + elif format == "terse": + return AMATerseUnarchive(module) + elif format == "xmit": + return XMITUnarchive(module) + return ZipUnarchive(module) + + +def tar_filter(member, dest_path): + name = member.name + if name.startswith(('/', os.sep)): + name = member.path.lstrip('/' + os.sep) + if os.path.isabs(name): + raise AbsolutePathError + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + if member.islnk() or member.issym(): + if os.path.isabs(member.linkname): + raise AbsoluteLinkError(member) + target_path = os.path.realpath(os.path.join(dest_path, member.linkname)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise LinkOutsideDestinationError(member, target_path) + + +def zip_filter(member, dest_path): + name = member.filename + if name.startswith(('/', os.sep)): + name = name.lstrip('/' + os.sep) + if os.path.isabs(name): + raise AbsolutePathError + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + + +def sanitize_members(members, dest, format): + """ + Filter inspired by (PEP 706) + - Refuse to extract any absolute path + - Refuse to extract any member with leading '/' + """ + dest_path = os.path.realpath(dest) + for member in 
members: + if format == 'zip': + zip_filter(member, dest_path) + else: + tar_filter(member, dest_path) + return members + + +class AbsolutePathError(Exception): + def __init__(self, tarinfo): + self.msg = "Unable to extract {0} as the files extracted can not contain an absolute path".format(tarinfo.name) + super().__init__(self.msg) + + +class OutsideDestinationError(Exception): + def __init__(self, tarinfo, path): + self.msg = 'Unable to extract {0} to {1}, which is outside the designated destination'.format(tarinfo.name, path) + super().__init__(self.msg) + + +class AbsoluteLinkError(Exception): + def __init__(self, tarinfo): + self.msg = '{0} is a symlink to an absolute path'.format(tarinfo.name) + super().__init__(self.msg) + + +class LinkOutsideDestinationError(Exception): + def __init__(self, tarinfo, path): + self.msg = 'Unable to extract {0} it would link to {1}, which is outside the designated destination'.format(tarinfo.name, path) + super().__init__() + + +def run_module(): + module = AnsibleModule( + argument_spec=dict( + src=dict(type='str', required=True), + dest=dict(type='str'), + include=dict(type='list', elements='str'), + exclude=dict(type='list', elements='str'), + list=dict(type='bool', default=False), + format=dict( + type='dict', + required=True, + options=dict( + name=dict( + type='str', + required=True, + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + xmit_log_data_set=dict( + type='str', + required=False, + ), + dest_volumes=dict( + type='list', + elements='str', + ), + use_adrdssu=dict( + type='bool', + default=False, + ) + ) + ), + ), + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + dest_data_set=dict( + type='dict', + required=False, + options=dict( + name=dict( + type='str', required=False, + ), + type=dict( + type='str', + choices=['SEQ', 'PDS', 'PDSE'], + required=False, + default='SEQ', + ), + 
space_primary=dict( + type='int', required=False), + space_secondary=dict( + type='int', required=False), + space_type=dict( + type='str', + choices=['K', 'M', 'G', 'CYL', 'TRK'], + required=False, + ), + record_format=dict( + type='str', + choices=["FB", "VB", "FBA", "VBA", "U"], + required=False + ), + record_length=dict(type='int', required=False), + block_size=dict(type='int', required=False), + directory_blocks=dict(type="int", required=False), + key_offset=dict(type="int", required=False, no_log=False), + key_length=dict(type="int", required=False, no_log=False), + sms_storage_class=dict(type="str", required=False), + sms_data_class=dict(type="str", required=False), + sms_management_class=dict(type="str", required=False), + ) + ), + tmp_hlq=dict(type='str'), + force=dict(type='bool', default=False), + remote_src=dict(type='bool', default=False), + ), + mutually_exclusive=[ + ['include', 'exclude'], + ], + supports_check_mode=True, + ) + + arg_defs = dict( + src=dict(type='str', required=True), + dest=dict(type='str', required=False, default=''), + include=dict(type='list', elements='str'), + exclude=dict(type='list', elements='str'), + list=dict(type='bool', default=False), + format=dict( + type='dict', + required=True, + options=dict( + name=dict( + type='str', + required=True, + default='gz', + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + xmit_log_data_set=dict( + type='str', + required=False, + ), + dest_volumes=dict( + type='list', + elements='str' + ), + use_adrdssu=dict( + type='bool', + default=False, + ), + ), + default=dict(xmit_log_data_set=""), + ) + ), + default=dict(name="", format_options=dict(xmit_log_data_set="")), + ), + dest_data_set=dict( + arg_type='dict', + required=False, + options=dict( + name=dict(arg_type='str', required=False), + type=dict(arg_type='str', required=False, default="SEQ"), + space_primary=dict(arg_type='int', required=False), + 
space_secondary=dict( + arg_type='int', required=False), + space_type=dict(arg_type='str', required=False), + record_format=dict( + arg_type='str', required=False), + record_length=dict(type='int', required=False), + block_size=dict(arg_type='int', required=False), + directory_blocks=dict(arg_type="int", required=False), + key_offset=dict(arg_type="int", required=False), + key_length=dict(arg_type="int", required=False), + sms_storage_class=dict(arg_type="str", required=False), + sms_data_class=dict(arg_type="str", required=False), + sms_management_class=dict(arg_type="str", required=False), + ) + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + tmp_hlq=dict(type='qualifier_or_empty', default=''), + force=dict(type='bool', default=False), + remote_src=dict(type='bool', default=False), + mutually_exclusive=[ + ['include', 'exclude'], + ], + ) + + try: + parser = better_arg_parser.BetterArgParser(arg_defs) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json(msg="Parameter verification failed", stderr=str(err)) + unarchive = get_unarchive_handler(module) + + if unarchive.list: + unarchive.list_archive_content() + module.exit_json(**unarchive.result) + + if not unarchive.src_exists(): + module.fail_json(msg="{0} does not exists, please provide a valid src.".format(module.params.get("src"))) + + unarchive.extract_src() + + if unarchive.dest_unarchived() and unarchive.dest_type() == "USS": + unarchive.update_permissions() + + module.exit_json(**unarchive.result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py new file mode 100644 index 000000000..e3b4b4ba7 --- /dev/null +++ b/tests/functional/modules/test_zos_archive_func.py @@ -0,0 +1,900 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2020, 
2022 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function +import time + +import pytest +__metaclass__ = type + +SHELL_EXECUTABLE = "/bin/sh" +USS_TEMP_DIR = "/tmp/archive" +USS_TEST_FILES = { f"{USS_TEMP_DIR}/foo.txt" : "foo sample content", + f"{USS_TEMP_DIR}/bar.txt": "bar sample content", + f"{USS_TEMP_DIR}/empty.txt":""} +USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" +TEST_PS = "USER.PRIVATE.TESTDS" +TEST_PDS = "USER.PRIVATE.TESTPDS" +HLQ = "USER" +MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" + +USS_DEST_ARCHIVE = "testarchive.dzp" + +STATE_ARCHIVED = 'archive' +STATE_INCOMPLETE = 'incomplete' + +USS_FORMATS = ['tar', 'zip', 'gz', 'bz2', 'pax'] + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +def set_uss_test_env(ansible_zos_module, test_files): + for key, value in test_files.items(): + ansible_zos_module.all.shell( + cmd=f"echo \"{value}\" > \"{key}\"", + executable=SHELL_EXECUTABLE, + ) + +def 
create_multiple_data_sets(ansible_zos_module, base_name, n, type, ): + test_data_sets = [] + for i in range(n): + curr_ds = dict(name=base_name+str(i), + type=type, + state="present", + replace=True, + force=True) + test_data_sets.append(curr_ds) + + # Create data sets in batch + ansible_zos_module.all.zos_data_set( + batch=test_data_sets + ) + return test_data_sets + +def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): + test_members = [] + for i in range(n): + curr_ds = dict(name="{0}({1})".format(pds_name, member_base_name+str(i)), + type="member", + state="present", + replace=True, + force=True) + test_members.append(curr_ds) + ansible_zos_module.all.zos_data_set( + batch=test_members + ) + return test_members + +###################################################### +# +# USS TEST +# +###################################################### +""" +List of tests: +- test_uss_single_archive +- test_uss_single_archive_with_mode +- test_uss_single_archive_with_force_option +- test_uss_archive_multiple_files +- test_uss_archive_multiple_files_with_exclude +- test_uss_archive_remove_targets +""" + + +# Core functionality tests +# Test archive with no options +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in 
cmd_result.contacted.values(): + assert "archive.{0}".format(format) in c_result.get("stdout") + + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive_with_mode(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + dest_mode = "0755" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + ), + mode=dest_mode) + stat_dest_res = hosts.all.stat(path=dest) + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + for stat_result in stat_dest_res.contacted.values(): + assert stat_result.get("stat").get("exists") is True + assert stat_result.get("stat").get("mode") == dest_mode + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive_with_force_option(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + + for result in archive_result.contacted.values(): + assert 
result.get("failed", False) is True + assert result.get("changed") is False + + set_uss_test_env(hosts, USS_TEST_FILES) + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + ), + force=True,) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +@pytest.mark.parametrize("path", [ + dict(files= f"{USS_TEMP_DIR}/*.txt", size=len(USS_TEST_FILES)), + dict(files=list(USS_TEST_FILES.keys()), size=len(USS_TEST_FILES)), + dict(files= f"{USS_TEMP_DIR}/" , size=len(USS_TEST_FILES) + 1), + ]) +def test_uss_archive_multiple_files(ansible_zos_module, format, path): + try: + hosts = ansible_zos_module + hosts.all.file(path=USS_TEMP_DIR, state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=path.get("files"), + dest=dest, + format=dict(name=format),) + + # resulting archived tag varies in size when a folder is archived using zip. 
+ size = path.get("size") + + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + assert len(result.get("archived")) == size + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert f"archive.{format}" in c_result.get("stdout") + + finally: + hosts.all.file(path=USS_TEMP_DIR, state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +@pytest.mark.parametrize("path", [ + dict(files=list(USS_TEST_FILES.keys()), size=len(USS_TEST_FILES) - 1, exclude=[f'{USS_TEMP_DIR}/foo.txt']), + dict(files= f"{USS_TEMP_DIR}/" , size=len(USS_TEST_FILES) + 1, exclude=[]), + ]) +def test_uss_archive_multiple_files_with_exclude(ansible_zos_module, format, path): + try: + hosts = ansible_zos_module + hosts.all.file(path=USS_TEMP_DIR, state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=path.get("files"), + dest=dest, + format=dict(name=format), + exclude=path.get("exclude")) + + # resulting archived tag varies in size when a folder is archived using zip. 
+ size = path.get("size") + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + assert len(result.get("archived")) == size + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert f"archive.{format}" in c_result.get("stdout") + finally: + hosts.all.file(path=USS_TEMP_DIR, state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_archive_remove_targets(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=USS_TEMP_DIR, state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + paths = list(USS_TEST_FILES.keys()) + archive_result = hosts.all.zos_archive(src=paths, + dest=dest, + format=dict(name=format), + remove=True) + + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert f"archive.{format}" in c_result.get("stdout") + for path in paths: + assert path not in c_result.get("stdout") + finally: + hosts.all.file(path=USS_TEMP_DIR, state="absent") + + +###################################################################### +# +# MVS data sets tests +# +###################################################################### + +""" +List of tests: +- test_mvs_archive_single_dataset +- test_mvs_archive_single_dataset_use_adrdssu +- test_mvs_archive_single_data_set_remove_target +- test_mvs_archive_multiple_data_sets +- test_mvs_archive_multiple_data_sets_use_adrdssu +- test_mvs_archive_multiple_data_sets_remove_target +- 
test_mvs_archive_multiple_data_sets_with_exclusion +- test_mvs_archive_multiple_data_sets_with_missing + +""" +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + # "record_format", ["FB", "VB", "FBA", "VBA", "U"], + "record_format", ["FB", "VB",], +) +def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert 
result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + # "record_format", ["FB", "VB", "FBA", "VBA", "U"], + "record_format", ["FB", "VB",], +) +def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + 
format_dict["format_options"] = dict(use_adrdssu=True) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80], +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src 
+ test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + remove=True, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert data_set.get("name") not in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in 
ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + for ds in target_ds_list: + assert ds.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = 
dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + exclude = "{0}1".format(data_set.get("name")) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + exclude=exclude, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + for ds in target_ds_list: + if ds.get("name") == exclude: + assert exclude not in result.get("archived") + else: + assert ds.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, 
ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + remove=True, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + for ds in target_ds_list: + assert ds.get("name") in result.get("archived") + assert ds.get("name") not in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + # Remove ds to make sure is 
missing + missing_ds = data_set.get("name")+"1" + hosts.all.zos_data_set(name=missing_ds, state="absent") + path_list = [ds.get("name") for ds in target_ds_list] + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src=path_list, + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest_state") == STATE_INCOMPLETE + assert missing_ds in result.get("missing") + for ds in target_ds_list: + if ds.get("name") == missing_ds: + assert ds.get("name") not in result.get("archived") + else: + assert ds.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set,): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in 
["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(ds_to_write), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r 
/tmp/disp_shr') + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py new file mode 100644 index 000000000..a4bf5e007 --- /dev/null +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -0,0 +1,988 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2020, 2022 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import, division, print_function + +import pytest +import tempfile +from tempfile import mkstemp + +__metaclass__ = type + +SHELL_EXECUTABLE = "/bin/sh" +USS_TEMP_DIR = "/tmp/archive" +USS_TEST_FILES = { f"{USS_TEMP_DIR}/foo.txt" : "foo sample content", + f"{USS_TEMP_DIR}/bar.txt": "bar sample content", + f"{USS_TEMP_DIR}/empty.txt":""} +USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" +TEST_PS = "USER.PRIVATE.TESTDS" +TEST_PDS = "USER.PRIVATE.TESTPDS" +HLQ = "USER" +MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" + +USS_DEST_ARCHIVE = "testarchive.dzp" + +USS_FORMATS = ['tar', 'gz', 'bz2', 'zip', 'pax'] + +def set_uss_test_env(ansible_zos_module, test_files): + for key, value in test_files.items(): + ansible_zos_module.all.shell( + cmd=f"echo \"{value}\" > \"{key}\"", + executable=SHELL_EXECUTABLE, + ) + + +def create_multiple_data_sets(ansible_zos_module, base_name, n, type, ): + test_data_sets = [] + for i in range(n): + curr_ds = dict(name=base_name+str(i), + type=type, + state="present", + replace=True, + force=True) + test_data_sets.append(curr_ds) + + # Create data sets in batch + ansible_zos_module.all.zos_data_set( + batch=test_data_sets + ) + return test_data_sets + + +def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): + test_members = [] + for i in range(n): + curr_ds = dict(name="{0}({1})".format(pds_name, member_base_name+str(i)), + type="member", + state="present", + replace=True, + force=True) + test_members.append(curr_ds) + ansible_zos_module.all.zos_data_set( + batch=test_members + ) + return test_members + + +###################################################### +# +# USS TEST +# +###################################################### +""" +List of tests: +- test_uss_unarchive +- test_uss_unarchive_include +- test_uss_unarchive_exclude +- test_uss_unarchive_list +""" + + +# Core functionality tests +# Test unarchive with no options +@pytest.mark.uss +@pytest.mark.parametrize("format", 
USS_FORMATS) +def test_uss_unarchive(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + remote_src=True, + ) + hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + for file in USS_TEST_FILES.keys(): + assert file[len(USS_TEMP_DIR)+1:] in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive_include(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + uss_files = [file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] + include_list = uss_files[:2] + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + include=include_list, + remote_src=True, + ) + + for result in 
unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + for file in uss_files: + if file in include_list: + assert file in c_result.get("stdout") + else: + assert file not in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive_exclude(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + # remove files + uss_files = [file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] + exclude_list = uss_files[:2] + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + exclude=exclude_list, + remote_src=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + for file in uss_files: + if file in exclude_list: + assert file not in c_result.get("stdout") + else: + assert file in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive_list(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + 
hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + remote_src=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + for file in USS_TEST_FILES.keys(): + assert file[len(USS_TEMP_DIR)+1:] in result.get("targets") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive_with_mode(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + dest_mode = "0755" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + for file in list(USS_TEST_FILES.keys()): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + remote_src=True, + mode=dest_mode, + ) + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + dest_files = list(USS_TEST_FILES.keys()) + for file in dest_files: + stat_dest_res = hosts.all.stat(path=file) + for stat_result in stat_dest_res.contacted.values(): + assert stat_result.get("stat").get("exists") is True + assert stat_result.get("stat").get("mode") == dest_mode + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", 
state="absent") + + +###################################################################### +# +# MVS data sets tests +# +###################################################################### + +""" +List of tests: +- test_mvs_unarchive_single_data_set +- test_mvs_unarchive_single_data_set_use_adrdssu +- test_mvs_unarchive_multiple_data_sets_use_adrdssu +- test_mvs_unarchive_multiple_data_sets_include +- test_mvs_unarchive_multiple_data_sets_exclude +- test_mvs_unarchive_list +- test_mvs_unarchive_force +- test_mvs_unarchive_remote_src + +""" + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120] +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = 
dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + dest_data_set=dict(name=data_set.get("name"), + type="SEQ", + record_format=record_format, + record_length=record_length), + ) + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + dest_data_set=dict(name=data_set.get("name"), + type=data_set.get("dstype"), + record_format=record_format, + record_length=record_length), + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + # assert result.get("dest") == MVS_DEST_ARCHIVE + # assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert data_set.get("name") in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, 
dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + format_dict["format_options"] = dict(use_adrdssu=True) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + + if format == "terse": + del 
format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True + ) + + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + # assert result.get("dest") == MVS_DEST_ARCHIVE + # assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert data_set.get("name") in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=1, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = 
hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + force=True + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=2, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + 
hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + include_ds = "{0}0".format(data_set.get("name")) + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + include=[include_ds], + ) + + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + if target_ds.get("name") == include_ds: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + else: + assert target_ds.get("name") not in result.get("targets") + assert target_ds.get("name") not in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def 
test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=2, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + exclude_ds = "{0}0".format(data_set.get("name")) + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + exclude=[exclude_ds], + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + if target_ds.get("name") == exclude_ds: + assert target_ds.get("name") not in result.get("targets") + assert target_ds.get("name") not in 
c_result.get("stdout") + else: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=2, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + list=True + ) + # assert response is positive + for result in 
unarchive_result.contacted.values(): + assert result.get("changed") is False + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") not in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +@pytest.mark.parametrize( + "force", [ + True, + False, + ]) +def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, format, data_set, force): + """ + This force test creates some data sets and attempt to extract using force flag as + True and False, when True no issues are expected, as False proper error message should + be displayed. 
+ """ + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=1, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + force=force + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + if force: + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + else: + assert result.get("changed") is False + assert result.get("failed", False) is True + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + 
"xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + format_dict["format_options"] = dict(use_adrdssu=True) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls 
{0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + + # fetch archive data set into tmp folder + fetch_result = hosts.all.zos_fetch(src=MVS_DEST_ARCHIVE, dest=tmp_folder.name, is_binary=True) + + for res in fetch_result.contacted.values(): + source_path = res.get("dest") + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=source_path, + format=format_dict, + remote_src=False, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + # assert result.get("dest") == MVS_DEST_ARCHIVE + # assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert data_set.get("name") in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + tmp_folder.cleanup() + diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index 8b4540038..a496e3ac8 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -33,3 +33,5 @@ plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issu plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git 
a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 8b4540038..a496e3ac8 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -33,3 +33,5 @@ plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issu plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 8b4540038..a496e3ac8 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -33,3 +33,5 @@ plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issu plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 6d79e8952f70a3991f15a2a56eda1937390beb93 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 18 Jul 2023 15:58:50 -0400 Subject: [PATCH 141/413] Removed emergency backup and auto-recovery features. (#896) * Removed emergency backup and auto-recovery features. Initial changelog fragment. 
* corrected changelog with PR # * Update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> * removed 2 unused routines: restore_backup and erase_backup. --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../696-zos-copy-remove-emergency-backup.yml | 6 + plugins/modules/zos_copy.py | 151 ------------------ 2 files changed, 6 insertions(+), 151 deletions(-) create mode 100644 changelogs/fragments/696-zos-copy-remove-emergency-backup.yml diff --git a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml new file mode 100644 index 000000000..b86a18d82 --- /dev/null +++ b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml @@ -0,0 +1,6 @@ +enhancements: +- zos_copy - Previously, backups were taken when force was set to false; + whether or not a user specified this operation which caused allocation issues + with space and permissions. This removes the automatic backup performed and + reverts to the original logic in that backups must be initiated by the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/896) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 0998f2a0e..02f71ab21 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1704,123 +1704,6 @@ def backup_data(ds_name, ds_type, backup_name, tmphlq=None): ) -def restore_backup( - dest, - backup, - dest_type, - use_backup, - volume=None, - members_to_restore=None, - members_to_delete=None -): - """Restores a destination file/directory/data set by using a given backup. - - Arguments: - dest (str) -- Name of the destination data set or path of the file/directory. - backup (str) -- Name or path of the backup. - dest_type (str) -- Type of the destination. 
- use_backup (bool) -- Whether the destination actually created a backup, sometimes the user - tries to use an empty data set, and in that case a new data set is allocated instead - of copied. - volume (str, optional) -- Volume where the data set should be. - members_to_restore (list, optional) -- List of members of a PDS/PDSE that were overwritten - and need to be restored. - members_to_delete (list, optional) -- List of members of a PDS/PDSE that need to be erased - because they were newly added. - """ - volumes = [volume] if volume else None - - if use_backup: - if dest_type == "USS": - if os.path.isfile(backup): - os.remove(dest) - shutil.copy(backup, dest) - else: - shutil.rmtree(dest, ignore_errors=True) - shutil.copytree(backup, dest) - else: - if dest_type in data_set.DataSet.MVS_VSAM: - data_set.DataSet.ensure_absent(dest, volumes) - repro_cmd = """ REPRO - - INDATASET('{0}') - - OUTDATASET('{1}')""".format(backup.upper(), dest.upper()) - idcams(repro_cmd, authorized=True) - elif dest_type in data_set.DataSet.MVS_SEQ: - response = datasets._copy(backup, dest) - if response.rc != 0: - raise CopyOperationError( - "An error ocurred while restoring {0} from {1}".format(dest, backup), - response.rc, - response.stdout_response, - response.stderr_response - ) - else: - if not members_to_restore: - members_to_restore = [] - if not members_to_delete: - members_to_delete = [] - - for i, member in enumerate(members_to_restore): - response = datasets._copy( - "{0}({1})".format(backup, member), - "{0}({1})".format(dest, member) - ) - - if response.rc != 0: - # In case of a failure, we'll assume that all past - # members in the list (with index < i) were restored successfully. - raise CopyOperationError( - "Error ocurred while restoring {0}({1}) from backup {2}.".format( - dest, - member, - backup - ) + " Members restored: {0}. 
Members that didn't get restored: {1}".format( - members_to_restore[:i], - members_to_restore[i:] - ), - response.rc, - response.stdout_response, - response.stderr_response - ) - - for i, member in enumerate(members_to_delete): - response = datasets._delete_members("{0}({1})".format(dest, member)) - - if response.rc != 0: - raise CopyOperationError( - "Error while deleting {0}({1}) after copy failure.".format(dest, member) + - " Members deleted: {0}. Members not able to be deleted: {1}".format( - members_to_delete[:i], - members_to_delete[i:] - ), - response.rc, - response.stdout_response, - response.stderr_response - ) - - else: - data_set.DataSet.ensure_absent(dest, volumes) - data_set.DataSet.allocate_model_data_set(dest, backup, volume) - - -def erase_backup(backup, dest_type, volume=None): - """Erases a temporary backup from the system. - - Arguments: - backup (str) -- Name or path of the backup. - dest_type (str) -- Type of the destination. - volume (str, optional) -- Volume where the data set should be. - """ - if dest_type == "USS": - if os.path.isfile(backup): - os.remove(backup) - else: - shutil.rmtree(backup, ignore_errors=True) - else: - volumes = [volume] if volume else None - data_set.DataSet.ensure_absent(backup, volumes) - - def is_compatible( src_type, dest_type, @@ -2609,32 +2492,6 @@ def run_module(module, arg_def): dest=dest ) - # Creating an emergency backup or an empty data set to use as a model to - # be able to restore the destination in case the copy fails. - emergency_backup = "" - if dest_exists and not force: - if is_uss or not data_set.DataSet.is_empty(dest_name): - use_backup = True - if is_uss: - # When copying a directory without a trailing slash, - # appending the source's base name to the backup path to - # avoid backing up the whole parent directory that won't - # be modified. 
- src_basename = os.path.basename(src) if src else '' - backup_dest = "{0}/{1}".format(dest, src_basename) if is_src_dir and not src.endswith("/") else dest - backup_dest = os.path.normpath(backup_dest) - emergency_backup = tempfile.mkdtemp() - emergency_backup = backup_data(backup_dest, dest_ds_type, emergency_backup, tmphlq) - else: - if not (dest_ds_type in data_set.DataSet.MVS_PARTITIONED and src_member and not dest_member_exists): - emergency_backup = backup_data(dest, dest_ds_type, None, tmphlq) - # If dest is an empty data set, instead create a data set to - # use as a model when restoring. - else: - use_backup = False - emergency_backup = data_set.DataSet.temp_name() - data_set.DataSet.allocate_model_data_set(emergency_backup, dest_name) - # Here we'll use the normalized source file by shadowing the # original one. This change applies only to the # allocate_destination_data_set call. @@ -2659,9 +2516,6 @@ def run_module(module, arg_def): volume=volume ) except Exception as err: - if dest_exists and not force: - restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) - erase_backup(emergency_backup, dest_ds_type) if converted_src: if remote_src: src = original_src @@ -2790,12 +2644,7 @@ def run_module(module, arg_def): res_args["changed"] = True except CopyOperationError as err: - if dest_exists and not force: - restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) raise err - finally: - if dest_exists and not force: - erase_backup(emergency_backup, dest_ds_type) res_args.update( dict( From 7720bd3d72ee9f51bf2878864b5c10688c2c5579 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 18 Jul 2023 16:01:34 -0400 Subject: [PATCH 142/413] Enhance/839/add field to job query (#841) * Initial change to manage 1.2.4 column changes for job listing * Improved documentation, code validated with playbooks. 
* Updated changelog fragment with PR * corrected duplicate entry in output documentation * Changes to comments and field names as per PR 841 Update to in-code documentation Expansion of changelog fragment * correction to datetime processing * fixing sample data --- .../839-Add-Field-to-zos-job-query.yml | 10 ++++ plugins/module_utils/job.py | 21 +++++++-- plugins/modules/zos_job_output.py | 46 +++++++++++++++++++ plugins/modules/zos_job_query.py | 38 ++++++++++----- plugins/modules/zos_job_submit.py | 33 +++++++++---- 5 files changed, 123 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/839-Add-Field-to-zos-job-query.yml diff --git a/changelogs/fragments/839-Add-Field-to-zos-job-query.yml b/changelogs/fragments/839-Add-Field-to-zos-job-query.yml new file mode 100644 index 000000000..52370356c --- /dev/null +++ b/changelogs/fragments/839-Add-Field-to-zos-job-query.yml @@ -0,0 +1,10 @@ +minor_changes: +- zos_job_query - zoau added 'program_name' to their field output + starting with v1.2.4. This enhancement checks for that version and passes the extra column through. + (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_submit - zoau added 'program_name' to their field output + starting with v1.2.4. This enhancement checks for that version and passes the extra column through. + (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_output - zoau added 'program_name' to their field output + starting with v1.2.4. This enhancement checks for that version and passes the extra column through. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/841) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d07ef5ac5..391583b75 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -207,7 +207,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] - # creationdate=job[9] creationtime=job[10] queueposition=job[11] + # creationdatetime=job[9] queueposition=job[10] + # starting in zoau 1.2.4, program_name[11] was added. final_entries = [] entries = listing(job_id=job_id_temp) @@ -246,17 +247,27 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"]["code"] = int(entry.rc) job["ret_code"]["msg_text"] = entry.status - # this section only works on zoau 1.2.3 vvv + # this section only works on zoau 1.2.3/+ vvv - if ZOAU_API_VERSION > "1.2.2": + if ZOAU_API_VERSION > "1.2.2" and ZOAU_API_VERSION < "1.2.4": job["job_class"] = entry.job_class job["svc_class"] = entry.svc_class job["priority"] = entry.priority job["asid"] = entry.asid - job["creation_datetime"] = entry.creation_datetime + job["creation_date"] = str(entry.creation_datetime)[0:10] + job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position + elif ZOAU_API_VERSION >= "1.2.4": + job["job_class"] = entry.job_class + job["svc_class"] = entry.svc_class + job["priority"] = entry.priority + job["asid"] = entry.asid + job["creation_date"] = str(entry.creation_datetime)[0:10] + job["creation_time"] = str(entry.creation_datetime)[12:] + job["queue_position"] = entry.queue_position + job["program_name"] = entry.program_name - # this section only works on zoau 1.2.3 ^^^ + # this section only works on zoau 1.2.3/+ ^^^ job["class"] = "" 
job["content_type"] = "" diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 3bf9f69d6..636698b3b 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -116,6 +116,16 @@ Type of address space. type: str sample: JOB + creation_date: + description: + Date, local to the target system, when the job was created. + type: str + sample: "2023-05-04" + creation_time: + description: + Time, local to the target system, when the job was created. + type: str + sample: "14:15:00" ddnames: description: Data definition names. @@ -175,6 +185,37 @@ " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " ] + job_class: + description: + Job class for this job. + type: str + sample: A + svc_class: + description: + Service class for this job. + type: str + sample: C + priority: + description: + A numeric indicator of the job priority assigned through JES. + type: int + sample: 4 + asid: + description: + The address Space Identifier (ASID) that is a unique descriptor for the job address space. + Zero if not active. + type: int + sample: 0 + queue_position: + description: + The position within the job queue where the jobs resides. + type: int + sample: 3 + program_name: + description: + The name of the program found in the job's last completed step found in the PGM parameter. + type: str + sample: "IEBGENER" ret_code: description: Return code output collected from job log. @@ -341,8 +382,13 @@ "stepname": "STEP0001" } ], + "duration": 0, + "job_class": "R", "job_id": "JOB00134", "job_name": "HELLO", + "priority": "1", + "program_name": "IEBGENER", + "queue_position": "58", "owner": "OMVSADM", "ret_code": { "code": 0, diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index cb9a28a53..431e06f02 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -176,12 +176,12 @@ } job_class: description: - Letter indicating job class for this job. + Job class for this job. 
type: str sample: A svc_class: description: - Character indicating service class for this job. + Service class for this job. type: str sample: C priority: @@ -191,19 +191,31 @@ sample: 4 asid: description: - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. + Zero if not active. type: int sample: 0 - creation_datetime: + creation_date: description: - Date and time, local to the target system, when the job was created. + Date, local to the target system, when the job was created. type: str - sample: 20230504T141500 + sample: "2023-05-04" + creation_time: + description: + Time, local to the target system, when the job was created. + type: str + sample: "14:15:00" queue_position: description: - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. type: int sample: 3 + program_name: + description: + The name of the program found in the job's last completed step found in the PGM parameter. 
+ type: str + sample: "IEBGENER" + sample: [ { @@ -215,7 +227,8 @@ "svc_class": "?", "priority": 1, "asid": 0, - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "queue_position": 3, }, { @@ -227,7 +240,8 @@ "svc_class": "E", "priority": 0, "asid": 4, - "creation_datetime": "20230503T121400", + "creation_date": "2023-05-03", + "creation_time": "12:14:00", "queue_position": 0, }, ] @@ -277,7 +291,7 @@ def run_module(): module.exit_json(**result) -# validate_arguments rturns a tuple, so we don't have to rebuild the job_name string +# validate_arguments returns a tuple, so we don't have to rebuild the job_name string def validate_arguments(params): job_name_in = params.get("job_name") @@ -400,8 +414,10 @@ def parsing_jobs(jobs_raw): "svc_class": job.get("svc_class"), "priority": job.get("priority"), "asid": job.get("asid"), - "creation_datetime": job.get("creation_datetime"), + "creation_date": job.get("creation_date"), + "creation_time": job.get("creation_time"), "queue_position": job.get("queue_position"), + "program_name": job.get("program_name"), } jobs.append(job_dict) return jobs diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index bc9f8ff19..994f4147d 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -282,12 +282,12 @@ } job_class: description: - Letter indicating job class for this job. + Job class for this job. type: str sample: A svc_class: description: - Character indicating service class for this job. + Service class for this job. type: str sample: C priority: @@ -297,19 +297,31 @@ sample: 4 asid: description: - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. + Zero if not active. type: int sample: 0 - creation_datetime: + creation_date: description: - Date and time, local to the target system, when the job was created. 
+ Date, local to the target system, when the job was created. type: str - sample: 20230504T141500 + sample: "2023-05-04" + creation_time: + description: + Time, local to the target system, when the job was created. + type: str + sample: "14:15:00" queue_position: description: - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. type: int sample: 3 + program_name: + description: + The name of the program found in the job's last completed step found in the PGM parameter. + type: str + sample: "IEBGENER" + sample: [ { @@ -526,10 +538,13 @@ "job_class": "K", "svc_class": "?", "priority": 1, + "program_name": "IEBGENER", "asid": 0, - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "queue_position": 3, - "subsystem": "STL1" + "subsystem": "STL1", + "system": "STL1" } ] message: From cf755e31a46cc231fb818faa5ab1b8612045d173 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 18 Jul 2023 14:06:10 -0600 Subject: [PATCH 143/413] Enhancement/850/Redesining test cases to be clearer (#840) * Remove duplicates and unnecesary declaration from lineinfile test * Delete blockninfile repeat and unecesary cases and finishing lineinfile * Solve test do not pass * Summary USS test case lineinfile * Clean ds tests general * Finishing clear lineinfile * Lineinfile clean proposal * Clean lineinfile * Finishing the clean of lineinfile and blockinfile * Lineinfile USS fully cleaned * Lineinfile clean * Clean blocinfile test * Structure for all test cases * Add fragment * Finall version without dictionaries * Add expected to variables encoding test case and simplify names * Close lineinfile * Remove the unnecesary marks * Get better encoding tests * Get better encoding tests * Remove encodings of datasets * Add encoding for ds --- 
.../fragments/840-redesign-test-cases.yml | 7 + .../modules/test_zos_blockinfile_func.py | 2079 ++++++++--------- .../modules/test_zos_lineinfile_func.py | 1568 ++++++------- tests/helpers/zos_blockinfile_helper.py | 345 --- tests/helpers/zos_lineinfile_helper.py | 340 --- 5 files changed, 1743 insertions(+), 2596 deletions(-) create mode 100644 changelogs/fragments/840-redesign-test-cases.yml delete mode 100644 tests/helpers/zos_blockinfile_helper.py delete mode 100644 tests/helpers/zos_lineinfile_helper.py diff --git a/changelogs/fragments/840-redesign-test-cases.yml b/changelogs/fragments/840-redesign-test-cases.yml new file mode 100644 index 000000000..8b9c2aee0 --- /dev/null +++ b/changelogs/fragments/840-redesign-test-cases.yml @@ -0,0 +1,7 @@ +trivial: +- zos_lininfile - Adjust test cases to be in one document and clearer to follow. +- zos_blockinfile - Adjust test cases to be in one document and clearer to follow. +bugfix: +- zos_blockinfile - Test case generate a data set that was not correctly removed. + Changes delete the correct data set not only member. + (https://github.com/ansible-collections/ibm_zos_core/pull/840) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 7cd92c9e5..23982aeec 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -12,20 +12,41 @@ # limitations under the License. 
from __future__ import absolute_import, division, print_function -from ibm_zos_core.tests.helpers.zos_blockinfile_helper import ( - UssGeneral, - DsGeneral, - DsNotSupportedHelper, - DsGeneralResultKeyMatchesRegex, - DsGeneralForce, - DsGeneralForceFail, -) -import os -import sys +from shellescape import quote +import time +import re import pytest +import inspect __metaclass__ = type +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" +TEST_FOLDER_BLOCKINFILE = "/tmp/ansible-core-tests/zos_blockinfile/" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + TEST_CONTENT = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -33,33 +54,12 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""" +export ZOAU_ROOT""" TEST_CONTENT_DEFAULTMARKER = """if [ -z STEPLIB ] && tty -s; then @@ 
-68,35 +68,16 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # BEGIN ANSIBLE MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAUTIL_DIR=/usr/lpp/zoautil/v100 # END ANSIBLE MANAGED BLOCK -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""" +export ZOAU_ROOT""" TEST_CONTENT_CUSTOMMARKER = """if [ -z STEPLIB ] && tty -s; then @@ -105,35 +86,16 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # OPEN IBM MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAUTIL_DIR=/usr/lpp/zoautil/v100 # CLOSE IBM MANAGED BLOCK -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""" +export ZOAU_ROOT""" TEST_CONTENT_DOUBLEQUOTES = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //USSCMD EXEC PGM=BPXBATCH @@ 
-145,128 +107,13 @@ /* //""" -# supported data set types -# DS_TYPE = ['SEQ', 'PDS', 'PDSE'] -DS_TYPE = ['SEQ'] -# not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] -""" -Note: zos_encode module uses USS cp command for copying from USS file to MVS data set which only supports IBM-1047 charset. -I had to develop and use a new tool for converting and copying to data set in order to set up environment for tests to publish results on Jira. -Until the issue be addressed I disable related tests. -""" -# ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] -ENCODING = ['IBM-1047'] -USS_BACKUP_FILE = "/tmp/backup.tmp" -MVS_BACKUP_DS = "BLOCKIF.TEST.BACKUP" -MVS_BACKUP_PDS = "BLOCKIF.TEST.BACKUP(BACKUP)" -BACKUP_OPTIONS = [None, MVS_BACKUP_DS, MVS_BACKUP_PDS] -TEST_ENV = dict( - TEST_CONT=TEST_CONTENT, - TEST_DIR="/tmp/zos_blockinfile/", - TEST_FILE="", - DS_NAME="", - DS_TYPE="", - ENCODING="", -) - -TEST_INFO = dict( - test_uss_block_insertafter_regex=dict( - insertafter="ZOAU_ROOT=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present"), - test_uss_block_insertbefore_regex=dict( - insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", - state="present"), - test_uss_block_insertafter_eof=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present"), - test_uss_block_insertafter_eof_with_backup=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", backup=True), - test_uss_block_insertafter_eof_with_backup_name=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", backup=True, - backup_name=USS_BACKUP_FILE), - test_uss_block_insert_with_force_option_as_true=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", force=True), - test_uss_block_insert_with_force_option_as_false=dict( - 
insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", force=False), - test_uss_block_insertbefore_bof=dict( - insertbefore="BOF", block="# this is file is for setting env vars", - state="present"), - test_uss_block_absent=dict(block="", state="absent"), - test_uss_block_absent_with_force_option_as_true=dict(block="", state="absent", force=True), - test_uss_block_absent_with_force_option_as_false=dict(block="", state="absent", force=True), - test_uss_block_replace_insertafter_regex=dict( - insertafter="PYTHON_HOME=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present"), - test_uss_block_replace_insertbefore_regex=dict( - insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", - state="present"), - test_uss_block_replace_insertafter_eof=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present"), - test_uss_block_replace_insertbefore_bof=dict( - insertbefore="BOF", block="# this is file is for setting env vars", - state="present"), - test_uss_block_insert_with_indentation_level_specified=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", indentation=16), - test_uss_block_insert_with_doublequotes=dict( - insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', - marker="// {mark} ANSIBLE MANAGED BLOCK",state="present"), - test_ds_block_insertafter_regex=dict(test_name="T1"), - test_ds_block_insertbefore_regex=dict(test_name="T2"), - test_ds_block_insertafter_eof=dict(test_name="T3"), - test_ds_block_insertbefore_bof=dict(test_name="T4"), - test_ds_block_absent=dict(test_name="T5"), - test_ds_block_tmp_hlq_option=dict( - insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, - tmp_hlq="TMPHLQ"), - test_ds_block_insert_with_force_option_as_true=dict( - block="export 
ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", force=True), - test_ds_block_absent_with_force_option_as_true=dict( - block="", state="absent", force=True), - test_ds_block_insert_with_force_option_as_false=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", force=False), - test_ds_block_absent_with_force_option_as_false=dict(block="", state="absent", force=False), - test_ds_block_insert_with_indentation_level_specified=dict(test_name="T7"), - test_ds_block_insertafter_eof_with_backup=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True), - test_ds_block_insertafter_eof_with_backup_name=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", backup=True, backup_name=MVS_BACKUP_DS), - test_ds_block_insertafter_regex_force=dict( - path="",insertafter="ZOAU_ROOT=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present", force=True), - test_ds_block_insertafter_regex_force_fail=dict( - path="",insertafter="ZOAU_ROOT=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present", force=False), - expected=dict(test_uss_block_insertafter_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +EXPECTED_INSERTAFTER_REGEX = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 @@ -275,50 +122,17 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # END ANSIBLE MANAGED BLOCK -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 
-PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insert_with_doublequotes="""//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M -//USSCMD EXEC PGM=BPXBATCH -//STDERR DD SYSOUT=* -//STDOUT DD SYSOUT=* -//STDPARM DD * -SH ls -la /; -sleep 30; -// BEGIN ANSIBLE MANAGED BLOCK -cat "//OMVSADMI.CAT" -cat "//OMVSADM.COPYMEM.TESTS" > test.txt -// END ANSIBLE MANAGED BLOCK -/* -//""", - test_uss_block_insertbefore_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTBEFORE_REGEX = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # BEGIN ANSIBLE MANAGED BLOCK @@ -327,58 +141,30 @@ unset ZOAU_DIR # END ANSIBLE MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertafter_eof_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTAFTER_EOF = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT 
export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT # BEGIN ANSIBLE MANAGED BLOCK export ZOAU_ROOT export ZOAU_HOME export ZOAU_DIR -# END ANSIBLE MANAGED BLOCK""", - test_uss_block_insertbefore_bof_defaultmarker="""# BEGIN ANSIBLE MANAGED BLOCK +# END ANSIBLE MANAGED BLOCK""" + +EXPECTED_INSERTBEFORE_BOF = """# BEGIN ANSIBLE MANAGED BLOCK # this is file is for setting env vars # END ANSIBLE MANAGED BLOCK if [ -z STEPLIB ] && tty -s; @@ -388,52 +174,20 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertafter_regex_custommarker="""if [ -z 
STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTAFTER_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 @@ -442,37 +196,17 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # CLOSE IBM MANAGED BLOCK -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertbefore_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTBEFORE_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # OPEN IBM MANAGED BLOCK @@ -481,58 +215,30 @@ unset ZOAU_DIR # CLOSE IBM MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export 
ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertafter_eof_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTAFTER_EOF_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT # OPEN IBM MANAGED BLOCK export ZOAU_ROOT export ZOAU_HOME export ZOAU_DIR -# CLOSE IBM MANAGED BLOCK""", - test_uss_block_insertbefore_bof_custommarker="""# OPEN IBM MANAGED BLOCK +# CLOSE IBM MANAGED BLOCK""" + +EXPECTED_INSERTBEFORE_BOF_CUSTOM = """# OPEN IBM MANAGED BLOCK # this is file is for setting env vars # CLOSE IBM MANAGED BLOCK if [ -z STEPLIB ] && tty -s; @@ -542,87 +248,56 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib 
PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_absent="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_ABSENT = """if [ -z STEPLIB ] && tty -s; +then + export STEPLIB=none + exec -a 0 SHELL +fi +TZ=PST8PDT +export TZ +export MAIL +umask 022 +ZOAU_ROOT=/usr/lpp/zoautil/v100 +PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig +PYTHON_HOME=/usr/lpp/izoda/v110/anaconda +export ZOAU_ROOT""" + +EXPECTED_INSERT_WITH_INDENTATION = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +# BEGIN ANSIBLE MANAGED BLOCK + export ZOAU_ROOT + export ZOAU_HOME + export ZOAU_DIR +# END ANSIBLE MANAGED BLOCK""" + +EXPECTED_REPLACE_INSERTAFTER = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH 
-LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda # BEGIN ANSIBLE MANAGED BLOCK @@ -630,36 +305,18 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # END ANSIBLE MANAGED BLOCK -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertbefore_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_INSERTBEFORE = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig # BEGIN ANSIBLE MANAGED BLOCK unset ZOAU_ROOT @@ -667,52 +324,28 @@ unset ZOAU_DIR # END ANSIBLE MANAGED BLOCK PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_eof_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_EOF_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 
SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT # BEGIN ANSIBLE MANAGED BLOCK export ZOAU_ROOT export ZOAU_HOME export ZOAU_DIR -# END ANSIBLE MANAGED BLOCK""", - test_uss_block_replace_insertbefore_bof_defaultmarker="""# BEGIN ANSIBLE MANAGED BLOCK +# END ANSIBLE MANAGED BLOCK""" + +EXPECTED_REPLACE_BOF_CUSTOM = """# BEGIN ANSIBLE MANAGED BLOCK # this is file is for setting env vars # END ANSIBLE MANAGED BLOCK if [ -z STEPLIB ] && tty -s; @@ -722,53 +355,23 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export 
ZOAU_ROOT""" + +EXPECTED_REPLACE_EOF_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda # OPEN IBM MANAGED BLOCK @@ -776,36 +379,18 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # CLOSE IBM MANAGED BLOCK -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertbefore_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_BOF_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig # OPEN IBM MANAGED BLOCK unset ZOAU_ROOT @@ -813,126 +398,71 @@ unset ZOAU_DIR # CLOSE IBM MANAGED BLOCK PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export 
PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_eof_custommarker="""if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT -# OPEN IBM MANAGED BLOCK -export ZOAU_ROOT -export ZOAU_HOME -export ZOAU_DIR -# CLOSE IBM MANAGED BLOCK""", - test_uss_block_replace_insertbefore_bof_custommarker="""# OPEN IBM MANAGED BLOCK -# this is file is for setting env vars -# CLOSE IBM MANAGED BLOCK -if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - 
test_uss_block_insert_with_indentation_level_specified="""if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT +export ZOAU_ROOT""" + +EXPECTED_DOUBLE_QUOTES = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//USSCMD EXEC PGM=BPXBATCH +//STDERR DD SYSOUT=* +//STDOUT DD SYSOUT=* +//STDPARM DD * +SH ls -la /; +sleep 30; +// BEGIN ANSIBLE MANAGED BLOCK +cat "//OMVSADMI.CAT" +cat "//OMVSADM.COPYMEM.TESTS" > test.txt +// END ANSIBLE MANAGED BLOCK +/* +//""" + +EXPECTED_ENCODING = """SIMPLE LINE TO VERIFY # BEGIN ANSIBLE MANAGED BLOCK - export ZOAU_ROOT - export ZOAU_HOME - export ZOAU_DIR -# END ANSIBLE MANAGED BLOCK""",), -) +Insert this string +# END ANSIBLE MANAGED BLOCK""" + +""" +Note: zos_encode module uses USS cp command for copying from USS file to MVS data set which only supports IBM-1047 charset. +I had to develop and use a new tool for converting and copying to data set in order to set up environment for tests to publish results on Jira. +Until the issue be addressed I disable related tests. 
+""" +ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] + +# supported data set types +DS_TYPE = ['SEQ', 'PDS', 'PDSE'] + +# not supported data set types +NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] + +USS_BACKUP_FILE = "/tmp/backup.tmp" +BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] + +def set_uss_environment(ansible_zos_module, CONTENT, FILE): + hosts = ansible_zos_module + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) + hosts.all.file(path=FILE, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, FILE)) + +def remove_uss_environment(ansible_zos_module): + hosts = ansible_zos_module + hosts.all.shell(cmd="rm -rf" + TEST_FOLDER_BLOCKINFILE) + +def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT): + hosts = ansible_zos_module + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) + hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) + if DS_TYPE in ["PDS", "PDSE"]: + DS_FULL_NAME = DS_NAME + "(MEM)" + hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) + else: + DS_FULL_NAME = DS_NAME + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), DS_FULL_NAME) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + return DS_FULL_NAME + +def remove_ds_environment(ansible_zos_module, DS_NAME): + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DS_NAME, state="absent") ######################### # USS test cases @@ -941,314 +471,461 @@ @pytest.mark.uss def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertafter="ZOAU_ROOT=", 
block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertbefore_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_insertbefore_regex_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) + hosts = ansible_zos_module + params = 
dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertbefore_bof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_insertbefore_bof_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertafter_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertafter_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, 
- TEST_INFO["expected"]["test_uss_block_insertafter_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) + @pytest.mark.uss def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertbefore_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertbefore_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertbefore_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = 
TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertafter_eof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertafter_eof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertafter_eof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): - _TEST_INFO = 
TEST_INFO["test_uss_block_insertbefore_bof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertbefore_bof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertbefore_bof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_absent_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_absent_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_absent"], - TEST_INFO["expected"]["test_uss_block_absent"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(block="", state="absent") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + 
assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_absent_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_absent"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_absent_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_absent"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(block="", state="absent") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertafter_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_regex_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + 
hosts = ansible_zos_module + params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_regex_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = 
TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_replace_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_eof_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_EOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_bof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_replace_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_bof_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + 
assert result.get("stdout") == EXPECTED_REPLACE_BOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertafter_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertafter_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertafter_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_EOF_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertbefore_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - 
"test_uss_block_replace_insertbefore_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_BOF_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertafter_eof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertafter_eof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertafter_eof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' 
+ params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertbefore_bof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_bof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_bof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT - - -@pytest.mark.uss -def test_uss_block_insert_with_force_option_as_true(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insert_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - - -@pytest.mark.uss -def test_uss_block_insert_with_force_option_as_false(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insert_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - - -@pytest.mark.uss -def 
test_uss_block_absent_with_force_option_as_true(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_absent_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_absent_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_absent"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT - - -@pytest.mark.uss -def test_uss_block_absent_with_force_option_as_false(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_absent_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_absent_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_absent"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): - UssGeneral( - "test_uss_block_insert_with_indentation_level_specified", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insert_with_indentation_level_specified"], - TEST_INFO["expected"]["test_uss_block_insert_with_indentation_level_specified"]) + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export 
ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insert_with_doublequotes(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DOUBLEQUOTES - UssGeneral( - "test_uss_block_insert_with_doublequotes", ansible_zos_module,TEST_ENV, - TEST_INFO["test_uss_block_insert_with_doublequotes"], - TEST_INFO["expected"]["test_uss_block_insert_with_doublequotes"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DOUBLEQUOTES + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES + finally: + remove_uss_environment(ansible_zos_module) + @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", 
backup=True) + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT try: - backup_name = USS_BACKUP_FILE - uss_result = UssGeneral( - "test_uss_block_insertafter_eof_with_backup", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_eof_with_backup"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - for result in uss_result.contacted.values(): + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): backup_name = result.get("backup_name") + assert result.get("changed") == 1 assert backup_name is not None + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: ansible_zos_module.all.file(path=backup_name, state="absent") + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=USS_BACKUP_FILE) + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT try: - uss_result = UssGeneral( - "test_uss_block_insertafter_eof_with_backup_name", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_eof_with_backup_name"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - for result in uss_result.contacted.values(): + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 assert result.get("backup_name") == USS_BACKUP_FILE - cmdStr = "cat {0}".format(result.get("backup_name")) - results = 
ansible_zos_module.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - assert result.get("stdout") == TEST_ENV["TEST_CONT"] + cmdStr = "cat {0}".format(USS_BACKUP_FILE) + results = ansible_zos_module.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + assert result.get("stdout") == TEST_CONTENT + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: ansible_zos_module.all.file(path=USS_BACKUP_FILE, state="absent") + remove_uss_environment(ansible_zos_module) ######################### @@ -1258,267 +935,430 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertafter_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"] - ) +def test_ds_block_insertafter_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + test_name = "DST1" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertbefore_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_insertbefore_regex_defaultmarker"] - ) +def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + test_name = "DST2" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertafter_eof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) +def test_ds_block_insertafter_eof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + test_name = "DST3" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertbefore_bof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_bof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_insertbefore_bof_defaultmarker"] - ) +def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + test_name = "DST4" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_regex_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + test_name = "DST5" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_regex_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + test_name = "DST6" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_eof_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + test_name = "DST7" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_bof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_bof_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + test_name = "DST8" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_absent(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_absent"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_absent"], - TEST_INFO["expected"]["test_uss_block_absent"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_absent(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(block="", state="absent") + test_name = "DST9" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_tmp_hlq_option(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - test_name = "T6" +def test_ds_tmp_hlq_option(ansible_zos_module): + # This TMPHLQ only works with sequential datasets + hosts = ansible_zos_module + ds_type = "SEQ" + params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") kwargs = dict(backup_name=r"TMPHLQ\..") - DsGeneralResultKeyMatchesRegex( - test_name, ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_tmp_hlq_option"], - **kwargs - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insert_with_force_option_as_true(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - "T6", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insert_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_absent_with_force_option_as_true(ansible_zos_module, dstype, encoding): - 
TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - "T7", ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_absent_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_absent"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + test_name = "DST10" + temp_file = "/tmp/zos_lineinfile/" + test_name + content = TEST_CONTENT + try: + hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + if len(hlq) > 8: + hlq = hlq[:8] + ds_full_name = hlq + "." + test_name.upper() + "." + ds_type + hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(ds_full_name)) + for result in results.contacted.values(): + print(result) + assert int(result.get("stdout")) != 0 + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + for key in kwargs: + assert re.match(kwargs.get(key), result.get(key)) + finally: + hosts.all.zos_data_set(name=ds_full_name, state="absent") @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insert_with_force_option_as_false(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - "T8", ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_insert_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) +def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, dstype): 
+ hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) + test_name = "DST11" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_absent_with_force_option_as_false(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - "T9", ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_absent_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_absent"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +@pytest.mark.parametrize("backup_name", BACKUP_OPTIONS) +def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup_name): + hosts = ansible_zos_module + ds_type = dstype + params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) + if backup_name: + params["backup_name"] = backup_name + test_name = "DST12" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + if backup_name: + backup_ds_name = result.get("backup_name") + assert backup_ds_name is not None + else: + backup_ds_name = result.get("backup_name") + assert backup_ds_name is not None + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) + if backup_name: + ansible_zos_module.all.zos_data_set(name="BLOCKIF.TEST.BACKUP", state="absent") + ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") + else: + ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insert_with_indentation_level_specified"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insert_with_indentation_level_specified"], - TEST_INFO["expected"]["test_uss_block_insert_with_indentation_level_specified"] - ) - @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("backup_name", BACKUP_OPTIONS) -def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, encoding, backup_name): +def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="",insertafter="ZOAU_ROOT=", 
block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=True) + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + content = TEST_CONTENT + if ds_type == "SEQ": + params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) try: - backup_ds_name = MVS_BACKUP_DS - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_INFO["test_ds_block_insertafter_eof_with_backup"]["backup_name"] = backup_name - ds_result = DsGeneral( - "T10", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insertafter_eof_with_backup"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] ) - for result in ds_result.contacted.values(): - backup_ds_name = result.get("backup_name") - assert backup_ds_name is not None - if encoding == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(backup_ds_name) - results = ansible_zos_module.all.shell(cmd=cmdStr) - print(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout") == TEST_ENV["TEST_CONT"] + # write memeber to verify cases + if ds_type in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert 
int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == True + results = hosts.all.shell(cmd=r"""cat "//'{0}'" """.format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: - ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") +######################### +# Encoding tests +######################### +@pytest.mark.uss +@pytest.mark.parametrize("encoding", ENCODING) +def test_uss_encoding(ansible_zos_module, encoding): + hosts = ansible_zos_module + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params["encoding"] = encoding + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) + hosts.all.file(path=full_path, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + 
hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insertafter_regex_force"], - TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"] - ) - - +@pytest.mark.parametrize("encoding", ["IBM-1047"]) +def test_ds_encoding(ansible_zos_module, encoding, dstype): + hosts = ansible_zos_module + ds_type = dstype + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params["encoding"] = encoding + test_name = "DST13" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.zos_data_set(name=ds_name, type=ds_type) + if ds_type in ["PDS", "PDSE"]: + ds_full_name = ds_name + "(MEM)" + hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) + else: + ds_full_name = ds_name + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + temp_file) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_ds_environment(ansible_zos_module, ds_name) ######################### # Negative tests ######################### @@ -1527,34 +1367,43 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): @pytest.mark.ds def test_not_exist_ds_block_insertafter_regex(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO["test_uss_block_insertafter_regex"] - test_info["path"] = "BIFTEST.NOTEXIST.SEQ" - results = hosts.all.zos_blockinfile(**test_info) + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["path"] = "BIFTEST.NOTEXIST.SEQ" + results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert "does NOT exist" in result.get("msg") @pytest.mark.ds def 
test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): - TEST_ENV["DS_TYPE"] = 'SEQ' - TEST_ENV["ENCODING"] = 'IBM-1047' - TEST_INFO["test_uss_block_insertafter_eof"]["insertafter"] = 'SOME_NON_EXISTING_PATTERN' - DsGeneral( - TEST_INFO["test_ds_block_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) + hosts = ansible_zos_module + ds_type = 'SEQ' + params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' + test_name = "DST13" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds def test_ds_block_insertafter_regex_wrongmarker(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO["test_uss_block_insertafter_regex"] - test_info["path"] = "BIFTEST.NOTEXIST.SEQ" - test_info["marker"] = '# MANAGED BLOCK' - results = hosts.all.zos_blockinfile(**test_info) - del test_info["marker"] + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["path"] = "BIFTEST.NOTEXIST.SEQ" + params["marker"] = '# MANAGED BLOCK' + results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert "marker should have {mark}" in 
result.get("msg") @@ -1562,18 +1411,74 @@ def test_ds_block_insertafter_regex_wrongmarker(ansible_zos_module): @pytest.mark.ds @pytest.mark.parametrize("dstype", NS_DS_TYPE) def test_ds_not_supported(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsNotSupportedHelper( - TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_regex"] - ) + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + test_name = "DST14" + ds_name = test_name.upper() + "." + ds_type + try: + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + assert len(hlq) <= 8 or hlq != '' + ds_name = test_name.upper() + "." + ds_type + results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + for result in results.contacted.values(): + assert result.get("changed") is True + params["path"] = ds_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("msg") == "VSAM data set type is NOT supported" + finally: + hosts.all.zos_data_set(name=ds_name, state="absent") @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsGeneralForceFail( - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insertafter_regex_force_fail"], - ) + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=False) + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + params["path"] = 
DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + content = TEST_CONTENT + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] + ) + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == False + assert result.get("failed") == True + finally: + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 85f4184af..6a29c79b8 100644 --- 
a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -12,618 +12,238 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from ibm_zos_core.tests.helpers.zos_lineinfile_helper import ( - UssGeneral, - DsGeneral, - DsNotSupportedHelper, - DsGeneralResultKeyMatchesRegex, - DsGeneralForceFail, - DsGeneralForce, -) -import os -import sys +from shellescape import quote +import time +import re import pytest +import inspect __metaclass__ = type -TEST_CONTENT = """if [ -z STEPLIB ] && tty -s; +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" +TEST_FOLDER_LINEINFILE = "/tmp/ansible-core-tests/zos_lineinfile/" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +TEST_CONTENT="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export 
ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT""" -# supported data set types -DS_TYPE = ['SEQ', 'PDS', 'PDSE'] -# not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] -ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] - -TEST_ENV = dict( - TEST_CONT=TEST_CONTENT, - TEST_DIR="/tmp/zos_lineinfile/", - TEST_FILE="", - DS_NAME="", - DS_TYPE="", - ENCODING="", -) - -TEST_INFO = dict( - test_uss_line_replace=dict( - path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present"), - test_uss_line_insertafter_regex=dict( - insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present"), - test_uss_line_insertbefore_regex=dict( - insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present"), - test_uss_line_insertafter_eof=dict( - insertafter="EOF", line="export ZOAU_ROOT", state="present"), - test_uss_line_insertbefore_bof=dict( - insertbefore="BOF", line="# this is file is for setting env vars", - state="present"), - test_uss_line_replace_match_insertafter_ignore=dict( - regexp="ZOAU_ROOT=", insertafter="PATH=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present"), - test_uss_line_replace_match_insertbefore_ignore=dict( - regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", - state="present"), - test_uss_line_replace_nomatch_insertafter_match=dict( - regexp="abcxyz", insertafter="ZOAU_ROOT=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present"), - test_uss_line_replace_nomatch_insertbefore_match=dict( - regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", - state="present"), - test_uss_line_replace_nomatch_insertafter_nomatch=dict( - regexp="abcxyz", insertafter="xyzijk", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present"), - test_uss_line_replace_nomatch_insertbefore_nomatch=dict( - regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", - state="present"), - 
test_uss_line_absent=dict(regexp="ZOAU_ROOT=", line="", state="absent"), - test_ds_line_replace=dict(test_name="T1"), - test_ds_line_insertafter_regex=dict(test_name="T2"), - test_ds_line_insertbefore_regex=dict(test_name="T3"), - test_ds_line_insertafter_eof=dict(test_name="T4"), - test_ds_line_insertbefore_bof=dict(test_name="T5"), - test_ds_line_replace_match_insertafter_ignore=dict(test_name="T6"), - test_ds_line_replace_match_insertbefore_ignore=dict(test_name="T7"), - test_ds_line_replace_nomatch_insertafter_match=dict(test_name="T8"), - test_ds_line_replace_nomatch_insertbefore_match=dict(test_name="T9"), - test_ds_line_replace_nomatch_insertafter_nomatch=dict(test_name="T10"), - test_ds_line_replace_nomatch_insertbefore_nomatch=dict(test_name="T11"), - test_ds_line_absent=dict(test_name="T12"), - test_ds_line_tmp_hlq_option=dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ"), - test_ds_line_force=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=True), - test_ds_line_force_fail=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=False), - test_ds_line_replace_force=dict(path="",regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present",force=True), - test_ds_line_insertafter_regex_force=dict(path="",insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present",force=True), - test_ds_line_insertbefore_regex_force=dict(path="",insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present",force=True), - test_ds_line_insertbefore_bof_force=dict(path="",insertbefore="BOF", line="# this is file is for setting env vars", - state="present",force=True), - test_ds_line_replace_match_insertafter_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertafter="PATH=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), - test_ds_line_replace_match_insertbefore_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset 
ZOAU_ROOT", - state="present",force=True), - test_ds_line_replace_nomatch_insertafter_match_force=dict(path="",regexp="abcxyz", insertafter="ZOAU_ROOT=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), - test_ds_line_replace_nomatch_insertbefore_match_force=dict(path="",regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", - state="present",force=True), - expected=dict(test_uss_line_replace="""if [ -z STEPLIB ] && tty -s; +EXPECTED_REPLACE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_insertafter_regex="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_INSERTAFTER_REGEX="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 
-PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_insertbefore_regex="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_INSERTBEFORE_REGEX="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 unset ZOAU_ROOT ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_insertafter_eof="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_INSERTAFTER_EOF="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 
-PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT -export ZOAU_ROOT""", - test_uss_line_insertbefore_bof="""# this is file is for setting env vars +export ZOAU_ROOT""" + +EXPECTED_INSERTBEFORE_BOF="""# this is file is for setting env vars if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_match_insertafter_ignore="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_INSERTAFTER_IGNORE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/mvsutil-develop_dsed 
-ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_match_insertbefore_ignore="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_INSERTBEFORE_IGNORE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 unset ZOAU_ROOT -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_nomatch_insertafter_match="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_NOMATCH_INSERTAFTER="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 
ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_nomatch_insertbefore_match="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_NOMATCH_INSERTBEFORE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 unset ZOAU_ROOT ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_nomatch_insertafter_nomatch="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH 
-MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT -ZOAU_ROOT=/mvsutil-develop_dsed""", - test_uss_line_replace_nomatch_insertbefore_nomatch="""if [ -z STEPLIB ] && tty -s; +ZOAU_ROOT=/mvsutil-develop_dsed""" + +EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT -unset ZOAU_ROOT""", - test_uss_line_absent="""if [ -z STEPLIB ] && tty -s; +unset ZOAU_ROOT""" + +EXPECTED_ABSENT="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L 
-export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_quoted="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_QUOTED="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT="/mvsutil-develop_dsed" -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_ds_line_force="""if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 
-PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT -export ZOAU_ROOT"""), -) +export _BPXK_AUTOCVT""" + +EXPECTED_ENCODING="""SIMPLE LINE TO VERIFY +Insert this string""" +def set_uss_environment(ansible_zos_module, CONTENT, FILE): + hosts = ansible_zos_module + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) + hosts.all.file(path=FILE, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, FILE)) + +def remove_uss_environment(ansible_zos_module): + hosts = ansible_zos_module + hosts.all.shell(cmd="rm -rf " + TEST_FOLDER_LINEINFILE) + +def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT): + hosts = ansible_zos_module + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) + hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) + if DS_TYPE in ["PDS", "PDSE"]: + DS_FULL_NAME = DS_NAME + "(MEM)" + hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) + else: + DS_FULL_NAME = DS_NAME + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), DS_FULL_NAME) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + return DS_FULL_NAME + +def remove_ds_environment(ansible_zos_module, DS_NAME): + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DS_NAME, state="absent") +# supported data set types +DS_TYPE = ['SEQ', 'PDS', 'PDSE'] +# not supported data set types +NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] +# The encoding will be only use on a few test +ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] ######################### # USS test cases @@ -632,462 +252,762 @@ @pytest.mark.uss def 
test_uss_line_replace(ansible_zos_module): - UssGeneral( - "test_uss_line_replace", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace"]) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertafter_regex(ansible_zos_module): - UssGeneral( - "test_uss_line_insertafter_regex", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertafter_regex"], - TEST_INFO["expected"]["test_uss_line_insertafter_regex"]) + hosts = ansible_zos_module + params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertbefore_regex(ansible_zos_module): - UssGeneral( - "test_uss_line_insertbefore_regex", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertbefore_regex"], - 
TEST_INFO["expected"]["test_uss_line_insertbefore_regex"]) + hosts = ansible_zos_module + params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertafter_eof(ansible_zos_module): - UssGeneral( - "test_uss_line_insertafter_eof", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_insertafter_eof"], - TEST_INFO["expected"]["test_uss_line_insertafter_eof"]) + hosts = ansible_zos_module + params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertbefore_bof(ansible_zos_module): - UssGeneral( - "test_uss_line_insertbefore_bof", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_line_insertbefore_bof"]) + hosts = ansible_zos_module + params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") + 
full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_match_insertafter_ignore", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace_match_insertafter_ignore"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] - ) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_match_insertbefore_ignore", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace_match_insertbefore_ignore"], - TEST_INFO["expected"] - ["test_uss_line_replace_match_insertbefore_ignore"] - ) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", 
state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertafter_match", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace_nomatch_insertafter_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_match"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertbefore_match", ansible_zos_module, - TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_match"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", 
insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertafter_nomatch", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertafter_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_nomatch"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertbefore_nomatch", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_nomatch"] - 
) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_absent(ansible_zos_module): - UssGeneral( - "test_uss_line_absent", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_absent"], - TEST_INFO["expected"]["test_uss_line_absent"]) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", line="", state="absent") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_quoted_escaped(ansible_zos_module): - TEST_INFO["test_uss_line_replace"]["line"] = 'ZOAU_ROOT=\"/mvsutil-develop_dsed\"' - UssGeneral( - "test_uss_line_replace", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace_quoted"]) + hosts = ansible_zos_module + params = dict(path="", regexp="ZOAU_ROOT=", line='ZOAU_ROOT=\"/mvsutil-develop_dsed\"', state="present") + full_path = 
TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] +    content = TEST_CONTENT +    try: +        set_uss_environment(ansible_zos_module, content, full_path) +        params["path"] = full_path +        results = hosts.all.zos_lineinfile(**params) +        for result in results.contacted.values(): +            assert result.get("changed") == 1 +        results = hosts.all.shell(cmd="cat {0}".format(params["path"])) +        for result in results.contacted.values(): +            assert result.get("stdout") == EXPECTED_QUOTED +    finally: +        remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): -    TEST_INFO["test_uss_line_replace"]["line"] = 'ZOAU_ROOT="/mvsutil-develop_dsed"' -    UssGeneral( -        "test_uss_line_replace", ansible_zos_module, TEST_ENV, -        TEST_INFO["test_uss_line_replace"], -        TEST_INFO["expected"]["test_uss_line_replace_quoted"]) +    hosts = ansible_zos_module +    params = dict(path="", regexp="ZOAU_ROOT=", line='ZOAU_ROOT="/mvsutil-develop_dsed"', state="present") +    full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] +    content = TEST_CONTENT +    try: +        set_uss_environment(ansible_zos_module, content, full_path) +        params["path"] = full_path +        results = hosts.all.zos_lineinfile(**params) +        for result in results.contacted.values(): +            assert result.get("changed") == 1 +        results = hosts.all.shell(cmd="cat {0}".format(params["path"])) +        for result in results.contacted.values(): +            assert result.get("stdout") == EXPECTED_QUOTED +    finally: +        remove_uss_environment(ansible_zos_module) ######################### # Dataset test cases ######################### +# NOTE: the `force` parameter determines which helper function is called; either extend the helper declarations by adding `force` (or a required test name) +# without changing the original descriptions, or alternatively restore the test environment to its original state at the end of each test.
@pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertafter_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertafter_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertafter_regex"], - TEST_INFO["expected"]["test_uss_line_insertafter_regex"] - ) +def test_ds_line_insertafter_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST1" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertbefore_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertbefore_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_line_insertbefore_regex"] - ) +def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + test_name = 
"DST2" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertafter_eof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertafter_eof"], - TEST_INFO["expected"]["test_uss_line_insertafter_eof"] - ) - +def test_ds_line_insertafter_eof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + test_name = "DST3" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertbefore_bof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertbefore_bof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_line_insertbefore_bof"] - ) +def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") + test_name = "DST4" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_match_insertafter_ignore( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_match_insertafter_ignore"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_match_insertafter_ignore"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] - ) +def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST5" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_match_insertbefore_ignore( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_match_insertbefore_ignore"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_match_insertbefore_ignore"], - TEST_INFO["expected"] - ["test_uss_line_replace_match_insertbefore_ignore"] - ) +def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") + test_name = "DST6" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertafter_match( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertafter_match"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertafter_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_match"] - ) +def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST7" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertbefore_match( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertbefore_match"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_match"] - ) +def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + test_name = "DST8" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertafter_nomatch( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertafter_nomatch"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertafter_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_nomatch"] - ) +def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST9" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertbefore_nomatch( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertbefore_nomatch"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_nomatch"] - ) +def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") + test_name = "DST10" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_absent(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_absent"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_absent"], - TEST_INFO["expected"]["test_uss_line_absent"] - ) +def test_ds_line_absent(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="ZOAU_ROOT=", line="", state="absent") + test_name = "DST11" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_tmp_hlq_option(ansible_zos_module, encoding): +def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets - TEST_ENV["DS_TYPE"] = 'SEQ' - TEST_ENV["ENCODING"] = encoding - test_name = "T12" + hosts = ansible_zos_module + ds_type = "SEQ" kwargs = dict(backup_name=r"TMPHLQ\..") - DsGeneralResultKeyMatchesRegex( - test_name, ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_line_tmp_hlq_option"], - **kwargs - ) - - + params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") + test_name = "DST12" + temp_file = "/tmp/zos_lineinfile/" + test_name + content = TEST_CONTENT + try: + hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + if len(hlq) > 8: + hlq = hlq[:8] + ds_full_name = hlq + "." + test_name.upper() + "." 
+ ds_type + hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(ds_full_name)) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + for key in kwargs: + assert re.match(kwargs.get(key), result.get(key)) + finally: + hosts.all.zos_data_set(name=ds_full_name, state="absent") + + +## Non supported test cases @pytest.mark.ds @pytest.mark.parametrize("dstype", NS_DS_TYPE) def test_ds_not_supported(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsNotSupportedHelper( - TEST_INFO["test_ds_line_replace"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace"] - ) - + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST13" + ds_name = test_name.upper() + "." + ds_type + try: + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + assert len(hlq) <= 8 or hlq != '' + ds_name = test_name.upper() + "." 
+ ds_type + results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + for result in results.contacted.values(): + assert result.get("changed") is True + params["path"] = ds_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("msg") == "VSAM data set type is NOT supported" + finally: + hosts.all.zos_data_set(name=ds_name, state="absent") -######################### -# Dataset test cases with force -######################### @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) @pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_force"], - TEST_INFO["expected"]["test_ds_line_force"] - ) +def test_ds_line_force(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="True") + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + content = TEST_CONTENT + if ds_type == "SEQ": + params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] + ) + # write memeber to verify cases + if ds_type in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), 
params["path"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == True + results = hosts.all.shell(cmd=r"""cat "//'{0}'" """.format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE + finally: + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_force_fail(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForceFail( - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_line_force_fail"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_force(ansible_zos_module, 
dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_force"], - TEST_INFO["expected"]["test_uss_line_replace"] - ) +@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) +def test_ds_line_force_fail(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="False") + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + content = TEST_CONTENT + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] + ) + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_lineinfile(**params) + for result in 
results.contacted.values(): + assert result.get("changed") == False + assert result.get("failed") == True + finally: + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_insertafter_regex_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_insertafter_regex_force"], - TEST_INFO["expected"]["test_uss_line_insertafter_regex"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_insertbefore_regex_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_insertbefore_regex_force"], - TEST_INFO["expected"]["test_uss_line_insertbefore_regex"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_insertbefore_bof_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_insertbefore_bof_force"], - TEST_INFO["expected"]["test_uss_line_insertbefore_bof"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_match_insertafter_ignore_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = 
dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_match_insertafter_ignore_force"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_match_insertbefore_ignore_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_match_insertbefore_ignore_force"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertbefore_ignore"] - ) - +######################### +# Encoding tests +######################### -@pytest.mark.ds +@pytest.mark.uss @pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertafter_match_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_nomatch_insertafter_match_force"], - TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertafter_match"] - ) +def test_uss_encoding(ansible_zos_module, encoding): + hosts = ansible_zos_module + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params["encoding"] = encoding + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) + hosts.all.file(path=full_path, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) + params["path"] = full_path + results = 
hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) @pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertbefore_match_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_nomatch_insertbefore_match_force"], - TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertbefore_match"] - ) +@pytest.mark.parametrize("encoding", ["IBM-1047"]) +def test_ds_encoding(ansible_zos_module, encoding, dstype): + hosts = ansible_zos_module + ds_type = dstype + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params["encoding"] = encoding + test_name = "DST13" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.zos_data_set(name=ds_name, type=ds_type) + if ds_type in ["PDS", "PDSE"]: + ds_full_name = ds_name + "(MEM)" + hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) + else: + ds_full_name = ds_name + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + temp_file) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_ds_environment(ansible_zos_module, ds_name) \ No newline at end of file diff --git a/tests/helpers/zos_blockinfile_helper.py b/tests/helpers/zos_blockinfile_helper.py deleted file mode 100644 index f5aa178fe..000000000 --- a/tests/helpers/zos_blockinfile_helper.py +++ /dev/null @@ -1,345 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) IBM Corporation 2020, 2022, 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function -from shellescape import quote -from pprint import pprint -import time -import re - - -__metaclass__ = type - - -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" - -c_pgm="""#include <stdio.h> -#include <stdlib.h> -#include <string.h> -int main(int argc, char** argv) -{ - char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); - FILE* member; - member = fopen(dsname, "rb,type=record"); - sleep(300); - fclose(member); - return 0; -} -""" - -call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M -//LOCKMEM EXEC PGM=BPXBATCH -//STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' -//STDIN DD DUMMY -//STDOUT DD SYSOUT=* -//STDERR DD SYSOUT=* -//""" - - -def set_uss_test_env(test_name, hosts, test_env): - test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name - try: - hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"])) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_env["TEST_FILE"])) - except Exception: - clean_uss_test_env(test_env["TEST_DIR"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_uss_test_env(test_dir, hosts): - try: - hosts.all.shell(cmd="rm -rf " + test_dir) - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def UssGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_uss_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["TEST_FILE"] - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - pprint(result) - assert result.get("changed") == 1 - cmdStr = "cat {0}".format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - pprint(result) - assert result.get("stdout") == expected - 
clean_uss_test_env(test_env["TEST_DIR"], hosts) - return blockinfile_results - - -def set_ds_test_env(test_name, hosts, test_env): - TEMP_FILE = test_env["TEST_DIR"] + test_name - hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"])) - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - if len(hlq) > 8: - hlq = hlq[:8] - test_env["DS_NAME"] = hlq + "." + test_name.upper() + "." + test_env["DS_TYPE"] - try: - hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"], replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - test_env["DS_NAME"] = test_env["DS_NAME"] + "(MEM)" - hosts.all.zos_data_set(name=test_env["DS_NAME"], state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - - if test_env["ENCODING"] != "IBM-1047": - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - else: - hosts.all.shell(cmd=cmdStr) - hosts.all.shell(cmd="rm -rf " + test_env["TEST_DIR"]) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - except Exception: - clean_ds_test_env(test_env["DS_NAME"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_ds_test_env(ds_name, hosts): - ds_name = ds_name.replace("(MEM)", "") - try: - hosts.all.zos_data_set(name=ds_name, state="absent") - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def DsGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = 
test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - pprint(result) - assert result.get("changed") == 1 - if test_env["ENCODING"] == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - pprint(result) - #assert result.get("stdout") == expected - assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') - clean_ds_test_env(test_env["DS_NAME"], hosts) - return blockinfile_results - - -def DsNotSupportedHelper(test_name, ansible_zos_module, test_env, test_info): - hosts = ansible_zos_module - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - test_env["DS_NAME"] = hlq + "." + test_name.upper() + "." 
+ test_env["DS_TYPE"] - results = hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"], replace='yes') - for result in results.contacted.values(): - pprint(result) - assert result.get("changed") is True - test_info["path"] = test_env["DS_NAME"] - results = hosts.all.zos_blockinfile(**test_info) - for result in results.contacted.values(): - pprint(result) - assert result.get("changed") is False - assert result.get("msg") == "VSAM data set type is NOT supported" - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test_info, **kwargs): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - results = hosts.all.zos_blockinfile(**test_info) - for result in results.contacted.values(): - pprint(result) - for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralForce(ansible_zos_module, test_env, test_info, expected): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - if test_env["DS_TYPE"] == "SEQ": - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - else: - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - hosts = ansible_zos_module - try: - # set up: - # create pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": 
test_env["DS_NAME"], - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # write memeber to verify cases - # print(test_env["TEST_CONT"]) - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - hosts.all.shell(cmd=cmdStr) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - if test_env["ENCODING"] != 'IBM-1047': - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - - # pause to ensure c code acquires lock - time.sleep(5) - - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - assert result.get("changed") == True - - - if test_env["ENCODING"] == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - pprint(result) - assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') - else: - cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - 
pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == True - finally: - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - return blockinfile_results - - -def DsGeneralForceFail(ansible_zos_module, test_env, test_info): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - hosts = ansible_zos_module - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - try: - # set up: - # create pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # write memeber to verify cases - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_info["path"])) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - - # pause to ensure c code acquires lock - time.sleep(5) - - 
blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - pprint(result) - assert result.get("changed") == False - assert result.get("failed") == True - finally: - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file diff --git a/tests/helpers/zos_lineinfile_helper.py b/tests/helpers/zos_lineinfile_helper.py deleted file mode 100644 index bac392e80..000000000 --- a/tests/helpers/zos_lineinfile_helper.py +++ /dev/null @@ -1,340 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) IBM Corporation 2020, 2022, 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function -from shellescape import quote -import time -from pprint import pprint -import re - -__metaclass__ = type - -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" - -c_pgm="""#include <stdio.h> -#include <stdlib.h> -#include <string.h> -int main(int argc, char** argv) -{ - char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); - FILE* member; - member = fopen(dsname, "rb,type=record"); - sleep(300); - fclose(member); - return 0; -} -""" - -call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M -//LOCKMEM EXEC PGM=BPXBATCH -//STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' -//STDIN DD DUMMY -//STDOUT DD SYSOUT=* -//STDERR DD SYSOUT=* -//""" - - -def set_uss_test_env(test_name, hosts, test_env): - test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name - try: - hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"])) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_env["TEST_FILE"])) - except Exception: - clean_uss_test_env(test_env["TEST_DIR"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_uss_test_env(test_dir, hosts): - try: - hosts.all.shell(cmd="rm -rf " + test_dir) - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def UssGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_uss_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["TEST_FILE"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == 1 - cmdStr = "cat {0}".format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout") == expected - clean_uss_test_env(test_env["TEST_DIR"], hosts) - - -def set_ds_test_env(test_name, hosts, test_env): - TEMP_FILE = 
"/tmp/" + test_name - """ - encoding = test_env["ENCODING"].replace("-", "").replace(".", "").upper() - try: - int(encoding[0]) - encoding = "E" + encoding - except: - pass - if len(encoding) > 7: - encoding = encoding[:4] + encoding[-4:] - """ - # simplifying dataset name, zos_encode seems to have issues with some dataset names (can be from ZOAU) - encoding = "ENC" - test_env["DS_NAME"] = test_name.upper() + "." + encoding + "." + test_env["DS_TYPE"] - - try: - hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"]) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - test_env["DS_NAME"] = test_env["DS_NAME"] + "(MEM)" - hosts.all.zos_data_set(name=test_env["DS_NAME"], state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - - if test_env["ENCODING"] != "IBM-1047": - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - else: - hosts.all.shell(cmd=cmdStr) - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - except Exception: - clean_ds_test_env(test_env["DS_NAME"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_ds_test_env(ds_name, hosts): - ds_name = ds_name.replace("(MEM)", "") - try: - hosts.all.zos_data_set(name=ds_name, state="absent") - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def DsGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if 
test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == 1 - if test_env["ENCODING"] == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsNotSupportedHelper(test_name, ansible_zos_module, test_env, test_info): - hosts = ansible_zos_module - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - test_env["DS_NAME"] = test_name.upper() + "." + test_name.upper() + "." + test_env["DS_TYPE"] - results = hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"], replace='yes') - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is True - test_info["path"] = test_env["DS_NAME"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("msg") == "VSAM data set type is NOT supported" - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test_info, **kwargs): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) - clean_ds_test_env(test_env["DS_NAME"], 
hosts) - - -def DsGeneralForce(ansible_zos_module, test_env, test_text, test_info, expected): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - if test_env["DS_TYPE"] == "SEQ": - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - else: - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - hosts = ansible_zos_module - try: - # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_text, TEMP_FILE)) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": test_env["DS_NAME"], - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # write memeber to verify cases - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - hosts.all.shell(cmd=cmdStr) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - if test_env["ENCODING"] != 'IBM-1047': - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - 
content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - - # pause to ensure c code acquires lock - time.sleep(5) - # call line infile to see results - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - - if test_env["ENCODING"] == 'IBM-1047': - cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout") == expected - else: - cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == True - #assert result.get("stdout") == expected - - finally: - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - -def DsGeneralForceFail(ansible_zos_module, test_env, test_info): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - hosts = ansible_zos_module - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - try: - # set up: - # create pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - 
}, - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - # pause to ensure c code acquires lock - time.sleep(5) - # call line infile to see results - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == False - assert result.get("failed") == True - finally: - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From 0dc808736b9f5aba7f5f176896529ae539987ea0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 18 Jul 2023 14:10:57 -0600 Subject: [PATCH 144/413] Functional tso command test cases currently can not be run concurrently#880 (#895) * Remove all dependent test and summary in one * Add fragment * Solve multiple commands and text about the explanation of testing * Change variables to be accurate * Remove import do not used * Add comments * Solve typos and writting --------- Co-authored-by: ketankelkar 
<ktnklkr@gmail.com> --- ..._currently_can_not_be_run_concurrently.yml | 6 + .../modules/test_zos_tso_command_func.py | 115 +++++++----------- 2 files changed, 47 insertions(+), 74 deletions(-) create mode 100644 changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml diff --git a/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml b/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml new file mode 100644 index 000000000..64ab4871c --- /dev/null +++ b/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml @@ -0,0 +1,6 @@ +trivial: +- zos_tso_command - Test suite was set up to run sequentially such that + certain tests relied on prior test cases. The new changes combine those + inter-dependent test cases into a single test case so that each individual + test case can now be run stand-alone. + (https://github.com/ansible-collections/ibm_zos_core/pull/895) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index dbdf888f4..44436d3da 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -15,21 +15,18 @@ __metaclass__ = type -import os -import sys -import warnings - import ansible.constants import ansible.errors import ansible.utils -import pytest +DEFAULT_TEMP_DATASET="imstestl.ims1.temp.ps" def test_zos_tso_command_run_help(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=["help"]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True @@ -45,89 +42,54 @@ def test_zos_tso_command_long_command_128_chars(ansible_zos_module): ] results 
= hosts.all.zos_tso_command(commands=command_string) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True -# The happy path test -# Run a long tso command to allocate a dataset. -def test_zos_tso_command_long_unauth_command_116_chars(ansible_zos_module): +def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): hosts = ansible_zos_module command_string = [ - "alloc da('imstestl.ims1.temp.ps') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)" + "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(DEFAULT_TEMP_DATASET) ] - results = hosts.all.zos_tso_command(commands=command_string) - for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 - assert result.get("changed") is True - - -# The positive path test -def test_zos_tso_command_auth_command_listds(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["LISTDS 'imstestl.ims1.temp.ps'"]) - for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 - assert result.get("changed") is True - - -# The positive path test -# tests that single command works as well -def test_zos_tso_single_command_auth_command_listds(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands="LISTDS 'imstestl.ims1.temp.ps'") - for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + results_allocate = hosts.all.zos_tso_command(commands=command_string) + # Validate the correct allocation of dataset + for result in results_allocate.contacted.values(): + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -# tests that single command works as well 
with alias -def test_zos_tso_command_auth_command_listds_using_alias(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(command=["LISTDS 'imstestl.ims1.temp.ps'"]) + # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command + results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -# tests that alias "command" works -def test_zos_tso_single_command_auth_command_listds_using_alias(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(command="LISTDS 'imstestl.ims1.temp.ps'") + # Validate LISTDS using alias param 'command' + results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -def test_zos_tso_command_unauth_command_listcat(ansible_zos_module): - hosts = ansible_zos_module + # Validate LISTCAT command and an unauth command results = hosts.all.zos_tso_command( - commands=["LISTCAT ENT('imstestl.ims1.temp.ps')"] + commands=["LISTCAT ENT('{0}')".format(DEFAULT_TEMP_DATASET)] ) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -def test_zos_tso_command_both_unauth_and_auth_command(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["delete 'imstestl.ims1.temp.ps'"]) + # Validate remove dataset + results = 
hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The failure path test -# the dataset has be deleted. -def test_zos_tso_command_valid_command_failed_as_has_been_deleted(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["delete 'imstestl.ims1.temp.ps'"]) + # Expect the tso_command to fail here because the previous command will have already deleted the data set + # Validate data set was removed by previous call + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 8 + for item in result.get("output"): + assert item.get("rc") == 8 assert result.get("changed") is False @@ -137,6 +99,8 @@ def test_zos_tso_command_empty_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=[""]) for result in results.contacted.values(): + for item in result.get("output"): + assert item.get("rc") == 255 assert result.get("changed") is False @@ -146,7 +110,8 @@ def test_zos_tso_command_invalid_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=["xxxxxx"]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 255 + for item in result.get("output"): + assert item.get("rc") == 255 assert result.get("changed") is False @@ -158,7 +123,10 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): results = hosts.all.zos_tso_command(commands=commands_list) for result in results.contacted.values(): for item in result.get("output"): - assert item.get("rc") == 0 + if item.get("command") == "LU omvsadm": + assert item.get("rc") == 0 + if item.get("command") == 
"LISTGRP": + assert item.get("rc") == 0 assert result.get("changed") is True @@ -169,6 +137,5 @@ def test_zos_tso_command_maxrc(ansible_zos_module): results = hosts.all.zos_tso_command(commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"],max_rc=4) for result in results.contacted.values(): for item in result.get("output"): - print( item ) assert item.get("rc") < 5 assert result.get("changed") is True From 04880d25c137c60ed9e0965e5848c07b641d4113 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Sat, 22 Jul 2023 09:23:27 -0700 Subject: [PATCH 145/413] Update make file doc generation with pre and post scripts for a subset of modules. (#906) * Update make file and add scripts to correct doc gen Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc with missing definition of timestamp Signed-off-by: ddimatos <dimatos@gmail.com> * Correct doc to remove colon to prevent doc gen warning Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc base on minor module doc changes Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/906-update-doc-generation.yml | 4 ++ docs/Makefile | 9 ++++ docs/scripts/post-template.sh | 23 ++++++++ docs/scripts/post-zos_apf.sh | 32 ++++++++++++ docs/scripts/pre-template.sh | 32 ++++++++++++ docs/source/modules/zos_apf.rst | 4 +- docs/source/modules/zos_archive.rst | 3 +- docs/source/modules/zos_data_set.rst | 4 +- docs/source/modules/zos_job_output.rst | 52 +++++++++++++++++++ docs/source/modules/zos_job_query.rst | 32 ++++++++---- docs/source/modules/zos_job_submit.rst | 33 ++++++++---- docs/source/modules/zos_unarchive.rst | 1 - plugins/modules/zos_apf.py | 2 + plugins/modules/zos_archive.py | 2 +- 14 files changed, 208 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/906-update-doc-generation.yml create mode 100755 docs/scripts/post-template.sh create mode 100755 
docs/scripts/post-zos_apf.sh create mode 100755 docs/scripts/pre-template.sh diff --git a/changelogs/fragments/906-update-doc-generation.yml b/changelogs/fragments/906-update-doc-generation.yml new file mode 100644 index 000000000..f2e5ae316 --- /dev/null +++ b/changelogs/fragments/906-update-doc-generation.yml @@ -0,0 +1,4 @@ +trivial: +- make - Current doc generation requires manual intervention, this change will + allow for doc generation without any manual intervention and removes warnings. + (https://github.com/ansible-collections/ibm_zos_core/pull/906) \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile index 573448a66..5f412c510 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -172,9 +172,18 @@ module-doc: mv ../plugins/modules/__init__.py ../plugins/modules/__init__.py.skip; \ fi + @echo "Replacing string based carriage returns with literal escaped to produce sphynx consumable RST." + scripts/pre-template.sh + @echo "Generating ReStructuredText for all ansible modules found at '../plugins/modules/' to 'source/modules'." @ansible-doc-extractor --template templates/module.rst.j2 source/modules ../plugins/modules/*.py + @echo "Updating zos_apf file." + scripts/post-zos_apf.sh + + @echo "Reverting edited source file." + scripts/post-template.sh + @if test -e ../plugins/modules/__init__.py.skip; \ echo "Rename file '../plugins/modules/__init__.py.skip' back to ../plugins/modules/__init__.py.'"; \ then mv -f ../plugins/modules/__init__.py.skip ../plugins/modules/__init__.py; \ diff --git a/docs/scripts/post-template.sh b/docs/scripts/post-template.sh new file mode 100755 index 000000000..73175ec35 --- /dev/null +++ b/docs/scripts/post-template.sh @@ -0,0 +1,23 @@ +#!/bin/sh + +################################################################################ +# © Copyright IBM Corporation 2020 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +################################################################################ +# This scripts actions called before after generating RST such that the +# original template.py is put back in its original state. +################################################################################ + +# Obtain the galaxy collection installion up to the template.py located on the host +template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` +mv $template_doc_source.tmp $template_doc_source \ No newline at end of file diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh new file mode 100755 index 000000000..a74207e48 --- /dev/null +++ b/docs/scripts/post-zos_apf.sh @@ -0,0 +1,32 @@ +#!/bin/sh + +################################################################################ +# © Copyright IBM Corporation 2020 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +################################################################################ +# This scripts actions called after generating RST and before generating Html. +# This script corrects the RST so that correct HTMl can be generated removing the +# warning: +# ibm_zos_core/docs/source/modules.rst:23: WARNING: toctree glob pattern 'modules/*' didn't match any documents +# This script will replace: +# " | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */" +# To this: +# " | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> \*/" +################################################################################ +set -x +SCRIPT_DIR=`dirname "$0"` +CURR_PATH=`pwd` +# Delete any temporary index RST +if [[ -f $CURR_PATH/source/modules/zos_apf.rst ]]; then + sed -i '' "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst +fi diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh new file mode 100755 index 000000000..8c627e0a5 --- /dev/null +++ b/docs/scripts/pre-template.sh @@ -0,0 +1,32 @@ +#!/bin/sh + +################################################################################ +# © Copyright IBM Corporation 2020 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +################################################################################ +# This scripts actions called before generating RST, this scripts leaves the +# "\n", "\r", "\r\n" in the template.py doc_fragment so that ansible linting +# test will pass such that the doc and module are match. Later this script will +# update the above strings to liters with an esacpe, for example "\n" --> '\\n'. +# This allows for RST to be generated that is usable by the ansible-doc-extractor +# and Jinja2 template, and later sphinx html. +# This requries that the ansible collection be prebuilt so that it can find +# the template.py within the collection (not within the git project). Thus run +# './ac --ac-build' before the make file that builds doc. +################################################################################ + +template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` +cp $template_doc_source $template_doc_source.tmp +sed -i '' "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source +sed -i '' "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source +sed -i '' "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index c1f3c3fd9..195b34611 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -128,9 +128,11 @@ persistent ``{mark}`` length may not exceed 72 characters. 
+ The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + | **required**: False | **type**: str - | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */ + | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> \*/ backup diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index bb4383f74..221de41ec 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -26,7 +26,6 @@ Synopsis - Parameters ---------- @@ -122,7 +121,7 @@ dest exclude Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. - Patterns (wildcards) can contain one of the following: ?, *. + Patterns (wildcards) can contain one of the following, `?`, `*`. * matches everything. diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 9e051bece..ddcc97a8b 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -146,7 +146,7 @@ record_format | **required**: False | **type**: str | **default**: FB - | **choices**: FB, VB, FBA, VBA, U + | **choices**: FB, VB, FBA, VBA, U, F sms_storage_class @@ -417,7 +417,7 @@ batch | **required**: False | **type**: str | **default**: FB - | **choices**: FB, VB, FBA, VBA, U + | **choices**: FB, VB, FBA, VBA, U, F sms_storage_class diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index bf57fb03a..76ae0364c 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -217,9 +217,14 @@ jobs "stepname": "STEP0001" } ], + "duration": 0, + "job_class": "R", "job_id": "JOB00134", "job_name": "HELLO", "owner": "OMVSADM", + "priority": "1", + "program_name": "IEBGENER", + "queue_position": "58", "ret_code": { "code": 0, "msg": "CC 0000", @@ -265,6 +270,18 @@ jobs | **type**: str | **sample**: JOB + 
creation_date + Date, local to the target system, when the job was created. + + | **type**: str + | **sample**: 2023-05-04 + + creation_time + Time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 14:15:00 + ddnames Data definition names. @@ -334,6 +351,41 @@ jobs ] + job_class + Job class for this job. + + | **type**: str + | **sample**: A + + svc_class + Service class for this job. + + | **type**: str + | **sample**: C + + priority + A numeric indicator of the job priority assigned through JES. + + | **type**: int + | **sample**: 4 + + asid + The address Space Identifier (ASID) that is a unique descriptor for the job address space. Zero if not active. + + | **type**: int + + queue_position + The position within the job queue where the jobs resides. + + | **type**: int + | **sample**: 3 + + program_name + The name of the program found in the job's last completed step found in the PGM parameter. + + | **type**: str + | **sample**: IEBGENER + ret_code Return code output collected from job log. diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 40bd7b353..519f5801a 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -134,7 +134,8 @@ jobs [ { "asid": 0, - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "job_class": "K", "job_id": "JOB01427", "job_name": "LINKJOB", @@ -146,7 +147,8 @@ jobs }, { "asid": 4, - "creation_datetime": "20230503T121400", + "creation_date": "2023-05-03", + "creation_time": "12:14:00", "job_class": "A", "job_id": "JOB16577", "job_name": "LINKCBL", @@ -245,13 +247,13 @@ jobs job_class - Letter indicating job class for this job. + Job class for this job. | **type**: str | **sample**: A svc_class - Character indicating service class for this job. + Service class for this job. 
| **type**: str | **sample**: C @@ -263,22 +265,34 @@ jobs | **sample**: 4 asid - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. Zero if not active. | **type**: int - creation_datetime - Date and time, local to the target system, when the job was created. + creation_date + Date, local to the target system, when the job was created. | **type**: str - | **sample**: 20230504T141500 + | **sample**: 2023-05-04 + + creation_time + Time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 14:15:00 queue_position - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. | **type**: int | **sample**: 3 + program_name + The name of the program found in the job's last completed step found in the PGM parameter. + + | **type**: str + | **sample**: IEBGENER + message Message returned on failure. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 9714f2766..8d5b8ecef 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -353,7 +353,8 @@ jobs "asid": 0, "class": "K", "content_type": "JOB", - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "ddnames": [ { "byte_count": "677", @@ -553,6 +554,7 @@ jobs "job_name": "DBDGEN00", "owner": "OMVSADM", "priority": 1, + "program_name": "IEBGENER", "queue_position": 3, "ret_code": { "code": 0, @@ -567,7 +569,8 @@ jobs ] }, "subsystem": "STL1", - "svc_class": "?" + "svc_class": "?", + "system": "STL1" } ] @@ -722,13 +725,13 @@ jobs job_class - Letter indicating job class for this job. + Job class for this job. | **type**: str | **sample**: A svc_class - Character indicating service class for this job. + Service class for this job. 
| **type**: str | **sample**: C @@ -740,22 +743,34 @@ jobs | **sample**: 4 asid - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. Zero if not active. | **type**: int - creation_datetime - Date and time, local to the target system, when the job was created. + creation_date + Date, local to the target system, when the job was created. | **type**: str - | **sample**: 20230504T141500 + | **sample**: 2023-05-04 + + creation_time + Time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 14:15:00 queue_position - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. | **type**: int | **sample**: 3 + program_name + The name of the program found in the job's last completed step found in the PGM parameter. + + | **type**: str + | **sample**: IEBGENER + message This option is being deprecated diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index ecced2362..ae3b92516 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -26,7 +26,6 @@ Synopsis - Parameters ---------- diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 072deb29b..d3a945d1b 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -118,6 +118,8 @@ - Using a custom marker without the C({mark}) variable may result in the block being repeatedly inserted on subsequent playbook runs. - C({mark}) length may not exceed 72 characters. 
+ - The timestamp (<timestamp>) used in the default marker + follows the '+%Y%m%d-%H%M%S' date format required: False type: str default: "/* {mark} ANSIBLE MANAGED BLOCK <timestamp> */" diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 8b887e1bf..0ace2b608 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -117,7 +117,7 @@ - Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. - - "Patterns (wildcards) can contain one of the following: ?, *." + - "Patterns (wildcards) can contain one of the following, `?`, `*`." - "* matches everything." - "? matches any single character." type: list From 2ef7147f034404100dfeac1e4844d2573e0ea85f Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 26 Jul 2023 14:18:40 -0400 Subject: [PATCH 146/413] Enhance/911/Improve-job-query-performance (#911) * Initial change to manage 1.2.4 column changes for job listing * Improved documentation, code validated with playbooks. * Updated changelog fragment with PR * corrected duplicate entry in output documentation * Changes to comments and field names as per PR 841 Update to in-code documentation Expansion of changelog fragment * correction to datetime processing * fixing sample data * changed job to pass column options to disable the program_name column tested against fresh zoau build (881) * removed 'testing' comment * updated re-request call to use the kwargs function. note: this is for record-keeping... 
about to rebuild this section * Modified call chain in job:status to not pull dd's, making it faster added new 'don't get dd' variable in get_job_output internal * corrected pep8 issues (spaces and parentheses) * Addressing requested changes: eliminate double negative Added new changelog fragment, corrected link to PR Updated return documentation to show dependancy on zoau version * Correct grammar in changelog Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../911-Improve-job-query-performance.yml | 4 + plugins/module_utils/job.py | 197 +++++++++--------- plugins/modules/zos_job_output.py | 1 + plugins/modules/zos_job_query.py | 1 + plugins/modules/zos_job_submit.py | 1 + 5 files changed, 106 insertions(+), 98 deletions(-) create mode 100644 changelogs/fragments/911-Improve-job-query-performance.yml diff --git a/changelogs/fragments/911-Improve-job-query-performance.yml b/changelogs/fragments/911-Improve-job-query-performance.yml new file mode 100644 index 000000000..a6722636e --- /dev/null +++ b/changelogs/fragments/911-Improve-job-query-performance.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. + This change removes those resulting in a performance increase in job related queries. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/911) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 391583b75..3a9c3b35e 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -137,6 +137,8 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): owner {str} -- The owner of the job (default: {None}) job_name {str} -- The job name search for (default: {None}) dd_name {str} -- If populated, return ONLY this DD in the job list (default: {None}) + note: no routines call job_status with dd_name, so we are speeding this routine with + 'dd_scan=False' Returns: list[dict] -- The status information for a list of jobs matching search criteria. @@ -148,26 +150,24 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): job_id=dict(arg_type="str"), owner=dict(arg_type="qualifier_pattern"), job_name=dict(arg_type="str"), - dd_name=dict(arg_type="str"), ) parser = BetterArgParser(arg_defs) parsed_args = parser.parse_args( - {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name} + {"job_id": job_id, "owner": owner, "job_name": job_name} ) job_id = parsed_args.get("job_id") or "*" job_name = parsed_args.get("job_name") or "*" owner = parsed_args.get("owner") or "*" - dd_name = parsed_args.get("dd_name") - job_status_result = _get_job_status(job_id, owner, job_name, dd_name) + job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) if len(job_status_result) == 0: job_id = "" if job_id == "*" else job_id job_name = "" if job_name == "*" else job_name owner = "" if owner == "*" else owner - job_status_result = _get_job_status(job_id, owner, job_name, dd_name) + job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) return job_status_result @@ -195,7 +195,7 @@ def _parse_steps(job_str): return stp -def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration=0, 
timeout=0, start_time=timer()): +def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): if job_id == "*": job_id_temp = None else: @@ -210,14 +210,20 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # creationdatetime=job[9] queueposition=job[10] # starting in zoau 1.2.4, program_name[11] was added. + # Testing has shown that the program_name impact is minor, so we're removing that option + # This will also help maintain compatibility with 1.2.3 + final_entries = [] - entries = listing(job_id=job_id_temp) + kwargs = { + "job_id": job_id_temp, + } + entries = listing(**kwargs) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = listing(job_id=job_id_temp) + entries = listing(**kwargs) if entries: for entry in entries: @@ -249,15 +255,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # this section only works on zoau 1.2.3/+ vvv - if ZOAU_API_VERSION > "1.2.2" and ZOAU_API_VERSION < "1.2.4": - job["job_class"] = entry.job_class - job["svc_class"] = entry.svc_class - job["priority"] = entry.priority - job["asid"] = entry.asid - job["creation_date"] = str(entry.creation_datetime)[0:10] - job["creation_time"] = str(entry.creation_datetime)[12:] - job["queue_position"] = entry.queue_position - elif ZOAU_API_VERSION >= "1.2.4": + if ZOAU_API_VERSION > "1.2.2": job["job_class"] = entry.job_class job["svc_class"] = entry.svc_class job["priority"] = entry.priority @@ -265,6 +263,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["creation_date"] = str(entry.creation_datetime)[0:10] job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position + if ZOAU_API_VERSION >= "1.2.4": job["program_name"] = entry.program_name # this section only works on zoau 
1.2.3/+ ^^^ @@ -274,93 +273,95 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"]["steps"] = [] job["ddnames"] = [] - list_of_dds = list_dds(entry.id) - while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): - current_time = timer() - duration = round(current_time - start_time) - sleep(1) + if dd_scan: list_of_dds = list_dds(entry.id) + while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): + current_time = timer() + duration = round(current_time - start_time) + sleep(1) + list_of_dds = list_dds(entry.id) + + for single_dd in list_of_dds: + dd = {} + + # If dd_name not None, only that specific dd_name should be returned + if dd_name is not None: + if dd_name not in single_dd["dataset"]: + continue + else: + dd["ddname"] = single_dd["dataset"] + + if "dataset" not in single_dd: + continue - for single_dd in list_of_dds: - dd = {} + if "recnum" in single_dd: + dd["record_count"] = single_dd["recnum"] + else: + dd["record_count"] = None - # If dd_name not None, only that specific dd_name should be returned - if dd_name is not None: - if dd_name not in single_dd["dataset"]: - continue + if "dsid" in single_dd: + dd["id"] = single_dd["dsid"] else: - dd["ddname"] = single_dd["dataset"] + dd["id"] = "?" - if "dataset" not in single_dd: - continue + if "stepname" in single_dd: + dd["stepname"] = single_dd["stepname"] + else: + dd["stepname"] = None + + if "procstep" in single_dd: + dd["procstep"] = single_dd["procstep"] + else: + dd["proctep"] = None - if "recnum" in single_dd: - dd["record_count"] = single_dd["recnum"] - else: - dd["record_count"] = None - - if "dsid" in single_dd: - dd["id"] = single_dd["dsid"] - else: - dd["id"] = "?" 
- - if "stepname" in single_dd: - dd["stepname"] = single_dd["stepname"] - else: - dd["stepname"] = None - - if "procstep" in single_dd: - dd["procstep"] = single_dd["procstep"] - else: - dd["proctep"] = None - - if "length" in single_dd: - dd["byte_count"] = single_dd["length"] - else: - dd["byte_count"] = 0 - - tmpcont = None - if "stepname" in single_dd: - if "dataset" in single_dd: - tmpcont = read_output( - entry.id, single_dd["stepname"], single_dd["dataset"]) - - dd["content"] = tmpcont.split("\n") - job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) - - job["ddnames"].append(dd) - if len(job["class"]) < 1: - if "- CLASS " in tmpcont: - tmptext = tmpcont.split("- CLASS ")[1] - job["class"] = tmptext.split(" ")[0] - - if len(job["system"]) < 1: - if "-- S Y S T E M " in tmpcont: - tmptext = tmpcont.split("-- S Y S T E M ")[1] - job["system"] = (tmptext.split( - "--", 1)[0]).replace(" ", "") - - if len(job["subsystem"]) < 1: - if "-- N O D E " in tmpcont: - tmptext = tmpcont.split("-- N O D E ")[1] - job["subsystem"] = (tmptext.split("\n")[ - 0]).replace(" ", "") - - # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " - # then further reduce down to: 'JCL ERROR 029' - if job["ret_code"]["msg_code"] == "?": - if "JOB NOT RUN -" in tmpcont: - tmptext = tmpcont.split( - "JOB NOT RUN -")[1].split("\n")[0] - job["ret_code"]["msg"] = tmptext.strip() - job["ret_code"]["msg_code"] = None - job["ret_code"]["code"] = None - if len(list_of_dds) > 0: - # The duration should really only be returned for job submit but the code - # is used job_output as well, for now we can ignore this point unless - # we want to offer a wait_time_s for job output which might be reasonable. 
- job["duration"] = duration - final_entries.append(job) + if "length" in single_dd: + dd["byte_count"] = single_dd["length"] + else: + dd["byte_count"] = 0 + + tmpcont = None + if "stepname" in single_dd: + if "dataset" in single_dd: + tmpcont = read_output( + entry.id, single_dd["stepname"], single_dd["dataset"]) + + dd["content"] = tmpcont.split("\n") + job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) + + job["ddnames"].append(dd) + if len(job["class"]) < 1: + if "- CLASS " in tmpcont: + tmptext = tmpcont.split("- CLASS ")[1] + job["class"] = tmptext.split(" ")[0] + + if len(job["system"]) < 1: + if "-- S Y S T E M " in tmpcont: + tmptext = tmpcont.split("-- S Y S T E M ")[1] + job["system"] = (tmptext.split( + "--", 1)[0]).replace(" ", "") + + if len(job["subsystem"]) < 1: + if "-- N O D E " in tmpcont: + tmptext = tmpcont.split("-- N O D E ")[1] + job["subsystem"] = (tmptext.split("\n")[ + 0]).replace(" ", "") + + # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " + # then further reduce down to: 'JCL ERROR 029' + if job["ret_code"]["msg_code"] == "?": + if "JOB NOT RUN -" in tmpcont: + tmptext = tmpcont.split( + "JOB NOT RUN -")[1].split("\n")[0] + job["ret_code"]["msg"] = tmptext.strip() + job["ret_code"]["msg_code"] = None + job["ret_code"]["code"] = None + if len(list_of_dds) > 0: + # The duration should really only be returned for job submit but the code + # is used job_output as well, for now we can ignore this point unless + # we want to offer a wait_time_s for job output which might be reasonable. 
+ job["duration"] = duration + + final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") return final_entries diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 636698b3b..3803acc2c 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -214,6 +214,7 @@ program_name: description: The name of the program found in the job's last completed step found in the PGM parameter. + Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. type: str sample: "IEBGENER" ret_code: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 431e06f02..283467766 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -213,6 +213,7 @@ program_name: description: The name of the program found in the job's last completed step found in the PGM parameter. + Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. type: str sample: "IEBGENER" diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 994f4147d..efdbd07d6 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -319,6 +319,7 @@ program_name: description: The name of the program found in the job's last completed step found in the PGM parameter. + Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. 
type: str sample: "IEBGENER" From 5005283753836c3c4660c7403b37648d581f450d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 12:57:17 -0700 Subject: [PATCH 147/413] Update Readme with new collection content Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5d48210a9..5cbd6fd98 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ executing operator commands, executing TSO commands, ping, querying operator actions, APF authorizing libraries, editing textual data in data sets or Unix System Services files, finding data sets, backing up and restoring data sets and -volumes, mounting file systems, running z/OS programs without JCL and -initializing volumes. +volumes, mounting file systems, running z/OS programs without JCL, +initializing volumes, archiving, unarchiving and templating with Jinja. Red Hat Ansible Certified Content for IBM Z From 1f0cb177c8ed2a0405ebbc120027b97ee15d2bd8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 13:09:07 -0700 Subject: [PATCH 148/413] Update copyright yrs Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/zoau_version_checker.py | 2 +- plugins/modules/zos_job_output.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index a5fff7196..c88dac481 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 3803acc2c..ec4aa0313 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From b4ef982b5570841a351dad4d4916ab604ede3227 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 13:57:34 -0700 Subject: [PATCH 149/413] Generated doc updates Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_blockinfile.rst | 2 +- docs/source/modules/zos_job_output.rst | 2 +- docs/source/modules/zos_job_query.rst | 2 +- docs/source/modules/zos_job_submit.rst | 2 +- docs/source/modules/zos_lineinfile.rst | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 3633620ad..e1e11486c 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -312,7 +312,7 @@ Return Values changed - Indicates if the source was modified + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. | **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index 76ae0364c..efea6ea2a 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -381,7 +381,7 @@ jobs | **sample**: 3 program_name - The name of the program found in the job's last completed step found in the PGM parameter. + The name of the program found in the job's last completed step found in the PGM parameter. 
Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. | **type**: str | **sample**: IEBGENER diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 519f5801a..ea320dfc3 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -288,7 +288,7 @@ jobs | **sample**: 3 program_name - The name of the program found in the job's last completed step found in the PGM parameter. + The name of the program found in the job's last completed step found in the PGM parameter. Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. | **type**: str | **sample**: IEBGENER diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 8d5b8ecef..e0fd8e2d1 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -766,7 +766,7 @@ jobs | **sample**: 3 program_name - The name of the program found in the job's last completed step found in the PGM parameter. + The name of the program found in the job's last completed step found in the PGM parameter. Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. | **type**: str | **sample**: IEBGENER diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 89ebcc805..e352007df 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -269,7 +269,7 @@ Return Values changed - Indicates if the source was modified + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. 
| **returned**: success | **type**: bool From 55740ea292776eef8375161fa32fa138fec1e0c3 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 14:00:17 -0700 Subject: [PATCH 150/413] Update module doc to clear the boolean value comes back as 1 or 0 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_blockinfile.py | 4 +++- plugins/modules/zos_lineinfile.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 014382f1e..7484d93ec 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -283,7 +283,9 @@ RETURN = r""" changed: - description: Indicates if the source was modified + description: + Indicates if the source was modified. + Value of 1 represents `true`, otherwise `false`. returned: success type: bool sample: 1 diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index c2a7a719c..6536509fd 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -242,7 +242,9 @@ RETURN = r""" changed: - description: Indicates if the source was modified + description: + Indicates if the source was modified. + Value of 1 represents `true`, otherwise `false`. 
returned: success type: bool sample: 1 From 438397e32b845e08579a21fcd352c017a36e0de5 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 14:49:10 -0700 Subject: [PATCH 151/413] Move a fragment that was outside the changlog folder Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/771-update-ansible-version.yaml | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 changelogs/771-update-ansible-version.yaml diff --git a/changelogs/771-update-ansible-version.yaml b/changelogs/771-update-ansible-version.yaml deleted file mode 100644 index 92354841b..000000000 --- a/changelogs/771-update-ansible-version.yaml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: -- doc - Updated the documentation in the README and release_notes.rst to reflect - ansible, ansible-core, Automation Hub and z/OS version. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) -- templates - Update the git issue templates with current and - future product versions. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file From 04bc4ab54dd435a5b45221af0e21a108da2c12a6 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 14:56:11 -0700 Subject: [PATCH 152/413] Changelog fragment lint error corrections and summary added Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/696-zos-copy-remove-emergency-backup.yml | 2 +- changelogs/fragments/771-update-ansible-version.yaml | 7 +++++++ ...tion_attributes_had_hardcoded_type_and_recordformat.yml | 2 +- changelogs/fragments/840-redesign-test-cases.yml | 2 +- changelogs/fragments/v1.7.0-beta.1_summary.yml | 6 ++++++ 5 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/771-update-ansible-version.yaml create mode 100644 changelogs/fragments/v1.7.0-beta.1_summary.yml diff --git a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml 
index b86a18d82..d9924cb2d 100644 --- a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml +++ b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml @@ -1,4 +1,4 @@ -enhancements: +major_changes: - zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and diff --git a/changelogs/fragments/771-update-ansible-version.yaml b/changelogs/fragments/771-update-ansible-version.yaml new file mode 100644 index 000000000..92354841b --- /dev/null +++ b/changelogs/fragments/771-update-ansible-version.yaml @@ -0,0 +1,7 @@ +trivial: +- doc - Updated the documentation in the README and release_notes.rst to reflect + ansible, ansible-core, Automation Hub and z/OS version. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) +- templates - Update the git issue templates with current and + future product versions. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file diff --git a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml index 8f4246f85..5b4e14aa8 100644 --- a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml +++ b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. 
diff --git a/changelogs/fragments/840-redesign-test-cases.yml b/changelogs/fragments/840-redesign-test-cases.yml index 8b9c2aee0..c998eeee4 100644 --- a/changelogs/fragments/840-redesign-test-cases.yml +++ b/changelogs/fragments/840-redesign-test-cases.yml @@ -1,7 +1,7 @@ trivial: - zos_lininfile - Adjust test cases to be in one document and clearer to follow. - zos_blockinfile - Adjust test cases to be in one document and clearer to follow. -bugfix: +bugfixes: - zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) \ No newline at end of file diff --git a/changelogs/fragments/v1.7.0-beta.1_summary.yml b/changelogs/fragments/v1.7.0-beta.1_summary.yml new file mode 100644 index 000000000..727e3da75 --- /dev/null +++ b/changelogs/fragments/v1.7.0-beta.1_summary.yml @@ -0,0 +1,6 @@ +release_summary: | + Release Date: '2023-07-26' + This changelog describes all changes made to the modules and plugins included + in this collection. The release date is the date the changelog is created. + For additional details such as required dependencies and availability review + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From 68855465a17928ac56d594a954575f446b3be624 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 16:04:26 -0700 Subject: [PATCH 153/413] Changelog generated Signed-off-by: ddimatos <dimatos@gmail.com> --- CHANGELOG.rst | 59 +++++++++++++++++-- changelogs/.plugin-cache.yaml | 12 +++- changelogs/changelog.yaml | 106 ++++++++++++++++++++++++++++++++++ 3 files changed, 172 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7cf358b23..826161e56 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,57 @@ ibm.ibm_zos_core Release Notes .. 
contents:: Topics +v1.7.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2023-07-26' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + +Minor Changes +------------- + +- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) +- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) +- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) +- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. 
(https://github.com/ansible-collections/ibm_zos_core/pull/778) +- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) +- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + +Bugfixes +-------- + +- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) +- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) +- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). +- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). 
+- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). +- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_job_output - Error message did not specify the job not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) +- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762). + +New Modules +----------- + +- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. +- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. 
+ v1.6.0 ====== @@ -26,11 +77,11 @@ Minor Changes ------------- - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning".. warn Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) -- module_utils - job.py utility did not support positional wild card placement, this enhancement uses `fnmatch` logic to support wild cards. +- module_utils - job.py utility did not support positional wiled card placement, this enhancement uses `fnmatch` logic to support wild cards. - zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) - zos_copy - was enhanced to keep track of modified members in a destination dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) -- zos_data_set - add force parameter to enable member delete while PDS/e is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). -- zos_job_query - ansible module does not support positional wild card placement for `job_name` or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). +- zos_job_query - ansible module does not support positional wild card placement for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/721) - zos_lineinfile - would access data sets with exclusive access so no other task can read the data, this enhancement allows for a data set to be opened with a disposition set to share so that other tasks can access the data when option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) - zos_tso_command - was enhanced to accept `max_rc` as an option. This option allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) @@ -39,7 +90,7 @@ Bugfixes - Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). -- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an appropriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an appropriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) - zos_copy - Encoding normalization used to handle newlines in text files was applied to binary files too. Fix makes sure that binary files bypass this normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664.
(https://github.com/ansible-collections/ibm_zos_core/pull/743) - zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/743) diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index fbc11cf4b..3520dc55a 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -16,6 +16,11 @@ plugins: name: zos_apf namespace: '' version_added: 1.3.0 + zos_archive: + description: Archive files and data sets on z/OS. + name: zos_archive + namespace: '' + version_added: 1.7.0 zos_backup_restore: description: Backup and restore data sets and volumes name: zos_backup_restore @@ -106,6 +111,11 @@ plugins: name: zos_tso_command namespace: '' version_added: 1.1.0 + zos_unarchive: + description: Unarchive files and data sets in z/OS. + name: zos_unarchive + namespace: '' + version_added: 1.7.0 zos_volume_init: description: Initialize volumes or minidisks. name: zos_volume_init @@ -116,4 +126,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.6.0 +version: 1.7.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 37049f8df..5f4da9de0 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -875,3 +875,109 @@ releases: name: zos_volume_init namespace: '' release_date: '2023-04-26' + 1.7.0-beta.1: + changes: + bugfixes: + - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM + data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). + - zos_blockinfile - Test case generate a data set that was not correctly removed. + Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) + - zos_copy - Module returned the dynamic values created with the same dataset + type and record format. 
Fix validate the correct dataset type and record format + of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) + - zos_copy - Reported a false positive such that the response would have `changed=true` + when copying from a source (src) or destination (dest) data set that was in + use (DISP=SHR). This change now displays an appropriate error message and + returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). + - zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. + This change ensures proper test coverage for nested directories and file permissions. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_copy - Zos_copy did not encode inner content inside subdirectories once + the source was copied to the destination. Fix now encodes all content in a + source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). + - zos_copy - kept permissions on target directory when copy overwrote files. + The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) + - zos_data_set - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical flow + that will evaluate the volumes, compare it to the provided value and if necessary + catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). + - zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_job_output - Error message did not specify the job not found. 
Fix now + specifies the job_id or job_name being searched to ensure more information + is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) + - zos_operator - Reported a failure caused by unrelated error response. Fix + now gives a transparent response of the operator to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). + major_changes: + - zos_copy - Previously, backups were taken when force was set to false; whether + or not a user specified this operation which caused allocation issues with + space and permissions. This removes the automatic backup performed and reverts + to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + minor_changes: + - Add support for Jinja2 templates in zos_copy and zos_job_submit when using + local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) + - zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, + space_type and type in the return output when the destination data set does + not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) + - zos_data_set - record format = 'F' has been added to support 'fixed' block + records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) + - zos_job_output - zoau added 'program_name' to their field output starting + with v1.2.4. This enhancement checks for that version and passes the extra + column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + - zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, + and queue_position to the return output when querying or submitting a job. + Available when using ZOAU v1.2.3 or greater. 
(https://github.com/ansible-collections/ibm_zos_core/pull/778) + - zos_job_query - unnecessary calls were made to find a jobs DDs that incurred + unnecessary overhead. This change removes those resulting in a performance + increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) + - zos_job_query - zoau added 'program_name' to their field output starting with + v1.2.4. This enhancement checks for that version and passes the extra column + through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + - zos_job_submit - zoau added 'program_name' to their field output starting + with v1.2.4. This enhancement checks for that version and passes the extra + column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + release_summary: 'Release Date: ''2023-07-26'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 439-add-f-recordtype.yml + - 667-template-support.yml + - 696-zos-copy-remove-emergency-backup.yml + - 747-failed_when_the_job_name_was_null_or_not_found.yaml + - 762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml + - 766-ac-command-replace-makefile.yml + - 771-update-ansible-version.yaml + - 772-Encode-files-recursively-and-test-case-for-keep-behavior.yml + - 773-return-dynamically-created-dest-attrs.yaml + - 778-query-new-fields.yml + - 789-ac-command-add-test.yml + - 789-ac-command-updates.yml + - 791-zos_data_set-update-vsam-copy.yml + - 794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml + - 795_overwrite_permissions_on_copy.yml + - 806-zos_copy_fetch-display-verbose.yml + - 812-ansible-lint.yml + - 
824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml + - 839-Add-Field-to-zos-job-query.yml + - 840-redesign-test-cases.yml + - 880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml + - 906-update-doc-generation.yml + - 911-Improve-job-query-performance.yml + - v1.7.0-beta.1_summary.yml + modules: + - description: Archive files and data sets on z/OS. + name: zos_archive + namespace: '' + - description: Unarchive files and data sets in z/OS. + name: zos_unarchive + namespace: '' + release_date: '2023-07-26' From c8a6facdfeb6088e96c26e43c85a1c08c497fabc Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 22:40:04 -0700 Subject: [PATCH 154/413] release notes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 80 +++++++++++++++++++++++++++++++++-- 1 file changed, 76 insertions(+), 4 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 1e211ec89..e512de025 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,22 +6,85 @@ Releases ======== -Version 1.6.0-beta.1 +Version 1.7.0-beta.1 ==================== New Modules ----------- +- ``zos_archive`` - archive files, data sets and extend archives on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. +- ``zos_unarchive`` - unarchive files and data sets in z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. + +Major Changes +------------- + +-- ``zos_copy`` and ``zos_job_submit`` - supports Jinja2 templating which is essential for handling tasks that require advanced file modifications such as JCL. + +Minor Changes +------------- +- ``zos_copy`` + + - displays the data set attributes when the destination does not exist and was created by the module. + - reverts the logic that would automatically create backups in the event of a module failure leaving it up to the user to decide if a backup is needed. 
+- ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. +- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). +- ``zos_job_query`` + - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). + - removes unnecessary queries to find DDs improving the modules performance. +- ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). + +Bugfixes +-------- +- ``zos_data_set`` - fixes occasionally occurring orphaned VSAM cluster components such as INDEX when `present=absent`. +- ``zos_fetch`` - fixes the warning that appeared about the use of _play_context.verbosity. +- ``zos_copy`` + + - fixes the warning that appeared about the use of _play_context.verbosity. + - fixes an issue where subdirectories would not be encoded. + - fixes an issue where when mode was set, the mode was not applied to existing directories and files. + - displays a error message when copying into a data set that is being accessed by another process and no longer returns with `changed=true`. + +``zos_job_output`` - displays an appropriate error message for a job is not found in the spool. +``zos_operator`` - fixes the false reports that a command failed when keywords such as *error* were seen, the module now acts as a passthrough. 
+ +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. + +Version 1.6.0 +============= + +New Modules +----------- + - ``zos_volume_init`` - Can initialize volumes or minidisks on target z/OS systems which includes creating a volume label and an entry into the volume table of contents (VTOC). Minor Changes ------------- - ``zos_blockinfile`` - Adds an enhancement to allow double quotes within a block. +- ``zos_copy`` + + - Updates the behavior of the `mode` option so that permissions are applied to existing directories and contents. + - Adds an enhancement to option `restore_backup` to track modified members in a data set in the event of an error, restoring them to their previous state without reallocating the data set. - ``zos_data_set`` - Adds a new option named *force* to enable deletion of a data member in a PDSE that is simultaneously in use by others. - ``zos_job_query`` - Enables embedded positional wild card placement throughout *job_name* and *job_id* parameters. - ``zos_lineinfile`` - Adds a new option named *force* to enable modification of a data member in a data set that is simultaneously in use by others. - ``zos_tso_command`` - Adds a new option named *max_rc* to enable non-zero return codes lower than the specified maximum return as succeeded. +- ``module_utils`` + + - job - Adds support for positional wild card placement for `job_name`` and `job_id`. + - Adds support for import *common.text.converters* over the deprecated *_text* import. Bugfixes -------- @@ -31,12 +94,20 @@ Bugfixes - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. 
- Fixes a bug where the module would change the mode for a directory when copying in the contents of another directory. - Fixes a bug where the incorrect encoding would be used during normalization, particularly when processing newlines in files. + - Fixes a bug where binary files were not excluded when normalizing data to remove newlines. + - Fixes a bug where a *_play_context.verbosity* deprecation warning would appear. +- ``zos_fetch`` - Fixes a bug where a *_play_context.verbosity* deprecation warning would appear. - ``zos_encode`` - Fixes a bug where converted files were not tagged with the new code set afterwards. - ``zos_find`` - Fixes a bug where the module would stop searching and exit after the first value in a list was not found. - ``zos_lineinfile`` - Removes use of Python f-string to ensure support for Python 2.7 on the controller. - - Fixes a bug where an incorect error message would be raised when a USS source was not found. + - Fixes a bug where an incorrect error message would be raised when a USS source was not found. +- ``module_utils`` + + - data_set - Fixes a failure caused by cataloging a VSAM data set when the data set is not cataloged. +- ``zos_data_set`` - Fixes a bug that will leave VSAM data set cluster components behind when instructed to delete the data set (`present=absent`). +- ``zos_gather_facts`` - Fixes a bug that prevented the module from executing with newer versions of ZOAU. Availability ------------ * `Galaxy`_ * `GitHub`_ Reference --------- -* Supported by `z/OS Version`_ V2R4 or later +* Supported by `z/OS V2R3`_ or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. - Version 1.5.0 ============= @@ -853,6 +923,8 @@ Reference https://www.ibm.com/docs/en/zoau/1.1.1 .. _Z Open Automation Utilities 1.2.2: https://www.ibm.com/docs/en/zoau/1.2.x ..
_Z Open Automation Utilities 1.2.3: + https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS V2R3: From 991b3a583e866caee5f42841eee52987ac009b55 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 27 Jul 2023 21:57:24 -0700 Subject: [PATCH 155/413] Delete changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/309-replace-text-zos-encode.yml | 4 --- ...os-job-query-handle-multiple-wildcards.yml | 7 ---- ...8-zos-data-set-support-disposition-shr.yml | 2 -- .../408-restore-members-on-failure.yml | 4 --- ...can-quotes-in-content-can-be-supported.yml | 5 --- changelogs/fragments/439-add-f-recordtype.yml | 4 --- .../574-zos_find_stoppedonnotfound.yml | 4 --- .../584-zos_lineinfile-error-message.yml | 2 -- .../fragments/602-text-converter-import.yml | 6 ---- ...es-is-applied-to-destination-directory.yml | 3 -- .../654-new-module-zos_volume_init.yml | 2 -- .../fragments/659-zos-lineinfile-f-string.yml | 8 ----- .../663-zos_gather_facts-update-docstring.yml | 2 -- .../fragments/666-zos_tso_command_maxrc.yml | 4 --- changelogs/fragments/667-template-support.yml | 4 --- .../fragments/683-zos_job_submit-bugs.yml | 35 ------------------- .../696-zos-copy-remove-emergency-backup.yml | 6 ---- .../727-zos-blockinfile-examples.yml | 5 --- .../729-zos_operator-example-added.yml | 4 --- .../731-zos_linefile-disposition_share.yaml | 6 ---- .../734-copy-loadlib-member-test-case.yml | 4 --- .../740-zos_copy-volume-symbol-test.yml | 5 --- .../fragments/743-zos_copy-encoding-bugs.yml | 9 ----- ...en_the_job_name_was_null_or_not_found.yaml | 5 --- ...re-caused-by-unrelated-error-response.yaml | 4 --- .../766-ac-command-replace-makefile.yml | 4 --- .../fragments/771-update-ansible-version.yaml | 7 ---- ...sively-and-test-case-for-keep-behavior.yml | 5 --- ...return-dynamically-created-dest-attrs.yaml | 6 ---- 
changelogs/fragments/778-query-new-fields.yml | 5 --- .../fragments/789-ac-command-add-test.yml | 3 -- .../fragments/789-ac-command-updates.yml | 3 -- .../791-zos_data_set-update-vsam-copy.yml | 11 ------ ...or_message_when_concurrent_copy_fails.yaml | 6 ---- .../795_overwrite_permissions_on_copy.yml | 4 --- .../806-zos_copy_fetch-display-verbose.yml | 17 --------- changelogs/fragments/812-ansible-lint.yml | 4 --- ...es_had_hardcoded_type_and_recordformat.yml | 5 --- .../839-Add-Field-to-zos-job-query.yml | 10 ------ .../fragments/840-redesign-test-cases.yml | 7 ---- ..._currently_can_not_be_run_concurrently.yml | 6 ---- .../fragments/906-update-doc-generation.yml | 4 --- .../911-Improve-job-query-performance.yml | 4 --- .../fragments/v1.7.0-beta.1_summary.yml | 6 ---- 44 files changed, 261 deletions(-) delete mode 100644 changelogs/fragments/309-replace-text-zos-encode.yml delete mode 100644 changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml delete mode 100644 changelogs/fragments/358-zos-data-set-support-disposition-shr.yml delete mode 100644 changelogs/fragments/408-restore-members-on-failure.yml delete mode 100644 changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml delete mode 100644 changelogs/fragments/439-add-f-recordtype.yml delete mode 100644 changelogs/fragments/574-zos_find_stoppedonnotfound.yml delete mode 100644 changelogs/fragments/584-zos_lineinfile-error-message.yml delete mode 100644 changelogs/fragments/602-text-converter-import.yml delete mode 100644 changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml delete mode 100644 changelogs/fragments/654-new-module-zos_volume_init.yml delete mode 100644 changelogs/fragments/659-zos-lineinfile-f-string.yml delete mode 100644 changelogs/fragments/663-zos_gather_facts-update-docstring.yml delete mode 100644 changelogs/fragments/666-zos_tso_command_maxrc.yml delete mode 100644 changelogs/fragments/667-template-support.yml delete mode 100644 
changelogs/fragments/683-zos_job_submit-bugs.yml delete mode 100644 changelogs/fragments/696-zos-copy-remove-emergency-backup.yml delete mode 100644 changelogs/fragments/727-zos-blockinfile-examples.yml delete mode 100644 changelogs/fragments/729-zos_operator-example-added.yml delete mode 100644 changelogs/fragments/731-zos_linefile-disposition_share.yaml delete mode 100644 changelogs/fragments/734-copy-loadlib-member-test-case.yml delete mode 100644 changelogs/fragments/740-zos_copy-volume-symbol-test.yml delete mode 100644 changelogs/fragments/743-zos_copy-encoding-bugs.yml delete mode 100644 changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml delete mode 100644 changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml delete mode 100644 changelogs/fragments/766-ac-command-replace-makefile.yml delete mode 100644 changelogs/fragments/771-update-ansible-version.yaml delete mode 100644 changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml delete mode 100644 changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml delete mode 100644 changelogs/fragments/778-query-new-fields.yml delete mode 100644 changelogs/fragments/789-ac-command-add-test.yml delete mode 100644 changelogs/fragments/789-ac-command-updates.yml delete mode 100644 changelogs/fragments/791-zos_data_set-update-vsam-copy.yml delete mode 100644 changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml delete mode 100644 changelogs/fragments/795_overwrite_permissions_on_copy.yml delete mode 100644 changelogs/fragments/806-zos_copy_fetch-display-verbose.yml delete mode 100644 changelogs/fragments/812-ansible-lint.yml delete mode 100644 changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml delete mode 100644 changelogs/fragments/839-Add-Field-to-zos-job-query.yml delete mode 100644 changelogs/fragments/840-redesign-test-cases.yml delete 
mode 100644 changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml delete mode 100644 changelogs/fragments/906-update-doc-generation.yml delete mode 100644 changelogs/fragments/911-Improve-job-query-performance.yml delete mode 100644 changelogs/fragments/v1.7.0-beta.1_summary.yml diff --git a/changelogs/fragments/309-replace-text-zos-encode.yml b/changelogs/fragments/309-replace-text-zos-encode.yml deleted file mode 100644 index b4ba2b53d..000000000 --- a/changelogs/fragments/309-replace-text-zos-encode.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_encode - fixes a bug where converted files were not tagged afterwards - with the new code set. - (https://github.com/ansible-collections/ibm_zos_core/pull/534) \ No newline at end of file diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml deleted file mode 100644 index 060df2fb1..000000000 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: -- zos_job_query - ansible module does not support positional wild card placement - for `job_name1 or `job_id`. This enhancement allows embedded wildcards - throughout the `job_name` and `job_id`. - (https://github.com/ansible-collections/ibm_zos_core/pull/721) -- module_utils - job.py utility did not support positional wiled card placement, - this enhancement uses `fnmatch` logic to support wild cards. diff --git a/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml b/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml deleted file mode 100644 index 4102bab0d..000000000 --- a/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). 
\ No newline at end of file diff --git a/changelogs/fragments/408-restore-members-on-failure.yml b/changelogs/fragments/408-restore-members-on-failure.yml deleted file mode 100644 index 3e6c50d12..000000000 --- a/changelogs/fragments/408-restore-members-on-failure.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_copy - was enhanced to keep track of modified members in a destination - dataset, restoring them to their previous state in case of a failure. - (https://github.com/ansible-collections/ibm_zos_core/pull/551) \ No newline at end of file diff --git a/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml b/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml deleted file mode 100644 index ebd99af7a..000000000 --- a/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: -- zos_blockinfile - was unable to use double quotes which prevented some use - cases and did not display an approriate message. The fix now allows for - double quotes to be used with the module. - (https://github.com/ansible-collections/ibm_zos_core/pull/680) \ No newline at end of file diff --git a/changelogs/fragments/439-add-f-recordtype.yml b/changelogs/fragments/439-add-f-recordtype.yml deleted file mode 100644 index 6c5e72f49..000000000 --- a/changelogs/fragments/439-add-f-recordtype.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_data_set - record format = 'F' has been added to support 'fixed' block records. - This allows records that can use the entire block. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/821) \ No newline at end of file diff --git a/changelogs/fragments/574-zos_find_stoppedonnotfound.yml b/changelogs/fragments/574-zos_find_stoppedonnotfound.yml deleted file mode 100644 index 48eebe523..000000000 --- a/changelogs/fragments/574-zos_find_stoppedonnotfound.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_find - fixes a bug where find result values stopped being returned after - first value in a list was 'not found'. - (https://github.com/ansible-collections/ibm_zos_core/pull/668) diff --git a/changelogs/fragments/584-zos_lineinfile-error-message.yml b/changelogs/fragments/584-zos_lineinfile-error-message.yml deleted file mode 100644 index fad485765..000000000 --- a/changelogs/fragments/584-zos_lineinfile-error-message.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: -- Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". diff --git a/changelogs/fragments/602-text-converter-import.yml b/changelogs/fragments/602-text-converter-import.yml deleted file mode 100644 index 24f719c26..000000000 --- a/changelogs/fragments/602-text-converter-import.yml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- Updated the text converter import from "from ansible.module_utils._text" - to "from ansible.module_utils.common.text.converters" to remove - warning".. warn Use ansible.module_utils.common.text.converters instead.". 
- (https://github.com/ansible-collections/ibm_zos_core/pull/602) - diff --git a/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml b/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml deleted file mode 100644 index 970741107..000000000 --- a/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: -- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. - (https://github.com/ansible-collections/ibm_zos_core/pull/723) \ No newline at end of file diff --git a/changelogs/fragments/654-new-module-zos_volume_init.yml b/changelogs/fragments/654-new-module-zos_volume_init.yml deleted file mode 100644 index 41808d718..000000000 --- a/changelogs/fragments/654-new-module-zos_volume_init.yml +++ /dev/null @@ -1,2 +0,0 @@ -major_changes: -- zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. (https://github.com/ansible-collections/ibm_zos_core/pull/654) \ No newline at end of file diff --git a/changelogs/fragments/659-zos-lineinfile-f-string.yml b/changelogs/fragments/659-zos-lineinfile-f-string.yml deleted file mode 100644 index bd5e0b269..000000000 --- a/changelogs/fragments/659-zos-lineinfile-f-string.yml +++ /dev/null @@ -1,8 +0,0 @@ -bugfixes: -- zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed - to ensure support for Python 2.7 on the controller. - (https://github.com/ansible-collections/ibm_zos_core/pull/659) -trivial: -- Remove changelog fragments no longer needed as they are already recorded in - the prior version of IBM z/OS Core. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/659) \ No newline at end of file diff --git a/changelogs/fragments/663-zos_gather_facts-update-docstring.yml b/changelogs/fragments/663-zos_gather_facts-update-docstring.yml deleted file mode 100644 index d6ba48dd7..000000000 --- a/changelogs/fragments/663-zos_gather_facts-update-docstring.yml +++ /dev/null @@ -1,2 +0,0 @@ -trivial: -- zos_gather_facts - add sample output to RETURN docstring. (https://github.com/ansible-collections/ibm_zos_core/pull/722) \ No newline at end of file diff --git a/changelogs/fragments/666-zos_tso_command_maxrc.yml b/changelogs/fragments/666-zos_tso_command_maxrc.yml deleted file mode 100644 index c410c00b5..000000000 --- a/changelogs/fragments/666-zos_tso_command_maxrc.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_tso_command - was enhanced to accept `max_rc` as an option. This option - allows a non-zero return code to succeed as a valid return code. - (https://github.com/ansible-collections/ibm_zos_core/pull/666) diff --git a/changelogs/fragments/667-template-support.yml b/changelogs/fragments/667-template-support.yml deleted file mode 100644 index 2ac499a3d..000000000 --- a/changelogs/fragments/667-template-support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - Add support for Jinja2 templates in zos_copy and zos_job_submit - when using local source files. - (https://github.com/ansible-collections/ibm_zos_core/pull/667) \ No newline at end of file diff --git a/changelogs/fragments/683-zos_job_submit-bugs.yml b/changelogs/fragments/683-zos_job_submit-bugs.yml deleted file mode 100644 index b77fbdbc9..000000000 --- a/changelogs/fragments/683-zos_job_submit-bugs.yml +++ /dev/null @@ -1,35 +0,0 @@ -bugfixes: -- zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that a - stack trace would result in the response, issue 623. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job is purged by the system that a - stack trace would result in the response, issue 681. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue where the response did not include the - job log when a non-zero return code would occur, issue 655. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when resources (data sets) identified in JCL - did not exist such that a stack trace would result in the response, issue 624. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would - result in a `type` error that a stack trace would result in the response, - issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job encounters a security exception no - job log would would result in the response, issue 684. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job is configured for a syntax check - using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` - to return a response, issue 685. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job is configured for a syntax check - using TYPRUN=SCAN that no job log would result in the response, issue 685. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -trivial: -- zos_job_submit - Update documentation to for deprecated `wait` option and - expand on the `wait_time_s` description, issue 670. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Update documentation to describing the significance of '?' - for the 'ret_code' properties 'msg_text', 'msg_code' and 'msg', issue 685. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_operator - Update restructured text to include the updated examples. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) \ No newline at end of file diff --git a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml deleted file mode 100644 index d9924cb2d..000000000 --- a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml +++ /dev/null @@ -1,6 +0,0 @@ -major_changes: -- zos_copy - Previously, backups were taken when force was set to false; - whether or not a user specified this operation which caused allocation issues - with space and permissions. This removes the automatic backup performed and - reverts to the original logic in that backups must be initiated by the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/896) diff --git a/changelogs/fragments/727-zos-blockinfile-examples.yml b/changelogs/fragments/727-zos-blockinfile-examples.yml deleted file mode 100644 index f1c94c12b..000000000 --- a/changelogs/fragments/727-zos-blockinfile-examples.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: -- zos_blockinfile - was missing examples using Jinja2 and files. This change - adds a Jinja2 example in both the src and block content. It also includes - an example using a file as source. - (https://github.com/ansible-collections/ibm_zos_core/pull/727) \ No newline at end of file diff --git a/changelogs/fragments/729-zos_operator-example-added.yml b/changelogs/fragments/729-zos_operator-example-added.yml deleted file mode 100644 index 46cb6ab84..000000000 --- a/changelogs/fragments/729-zos_operator-example-added.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- zos_operator - had a need for more command examples. This change adds the - D SYMBOLS example. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/730) \ No newline at end of file diff --git a/changelogs/fragments/731-zos_linefile-disposition_share.yaml b/changelogs/fragments/731-zos_linefile-disposition_share.yaml deleted file mode 100644 index da6dbc19b..000000000 --- a/changelogs/fragments/731-zos_linefile-disposition_share.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- zos_lineinfile - would access data sets with exclusive access so no other - task can read the data, this enhancement allows for a data set to be opened - with a disposition set to share so that other tasks can access the data when - option `force` is set to `true`. - (https://github.com/ansible-collections/ibm_zos_core/pull/731) \ No newline at end of file diff --git a/changelogs/fragments/734-copy-loadlib-member-test-case.yml b/changelogs/fragments/734-copy-loadlib-member-test-case.yml deleted file mode 100644 index 4482c61da..000000000 --- a/changelogs/fragments/734-copy-loadlib-member-test-case.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- zos_copy - Adds a test cases to ensure copying from a PDS/E member containing - a loadlib to another PDS/E member loadlib member for issue 601. - (https://github.com/ansible-collections/ibm_zos_core/pull/734) \ No newline at end of file diff --git a/changelogs/fragments/740-zos_copy-volume-symbol-test.yml b/changelogs/fragments/740-zos_copy-volume-symbol-test.yml deleted file mode 100644 index a30a50869..000000000 --- a/changelogs/fragments/740-zos_copy-volume-symbol-test.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: -- zos_copy - prior, there was no test case for symbols on a volume. - This change adds a test case to test a volume which has in it symbols, - issue 738. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/740) \ No newline at end of file diff --git a/changelogs/fragments/743-zos_copy-encoding-bugs.yml b/changelogs/fragments/743-zos_copy-encoding-bugs.yml deleted file mode 100644 index 1b58ddabe..000000000 --- a/changelogs/fragments/743-zos_copy-encoding-bugs.yml +++ /dev/null @@ -1,9 +0,0 @@ -bugfixes: -- zos_copy - Fixes a bug where files not encoded in IBM-1047 - would trigger an error while computing the record length - for a new destination dataset. Issue 664. - (https://github.com/ansible-collections/ibm_zos_core/pull/743) -- zos_copy - Fixes a bug where the code for fixing an issue with - newlines in files (issue 599) would use the wrong encoding - for normalization. Issue 678. - (https://github.com/ansible-collections/ibm_zos_core/pull/743) diff --git a/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml b/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml deleted file mode 100644 index 0830b8fe3..000000000 --- a/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: -- zos_job_output - Error message did not specify the job not found. - Fix now specifies the job_id or job_name being searched to ensure more - information is given back to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/747) diff --git a/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml b/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml deleted file mode 100644 index d7aae1c14..000000000 --- a/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_operator - Reported a failure caused by unrelated error response. - Fix now gives a transparent response of the operator to avoid false negatives. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file diff --git a/changelogs/fragments/766-ac-command-replace-makefile.yml b/changelogs/fragments/766-ac-command-replace-makefile.yml deleted file mode 100644 index ca0d17e0f..000000000 --- a/changelogs/fragments/766-ac-command-replace-makefile.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- ac - fixed makefile limitations and monolithic design. Command 'ac' performs - similar function only with greater automation and detection and modularity. - (https://github.com/ansible-collections/ibm_zos_core/pull/766) \ No newline at end of file diff --git a/changelogs/fragments/771-update-ansible-version.yaml b/changelogs/fragments/771-update-ansible-version.yaml deleted file mode 100644 index 92354841b..000000000 --- a/changelogs/fragments/771-update-ansible-version.yaml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: -- doc - Updated the documentation in the README and release_notes.rst to reflect - ansible, ansible-core, Automation Hub and z/OS version. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) -- templates - Update the git issue templates with current and - future product versions. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file diff --git a/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml b/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml deleted file mode 100644 index 672c454b7..000000000 --- a/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. - Fix now encodes all content in a source directory, including - subdirectories. - (https://github.com/ansible-collections/ibm_zos_core/pull/772). 
\ No newline at end of file diff --git a/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml b/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml deleted file mode 100644 index 0a8ce0adb..000000000 --- a/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- zos_copy - Adds block_size, record_format, record_length, space_primary, - space_secondary, space_type and type in the return output when - the destination data set does not exist and has to be created - by the module. - (https://github.com/ansible-collections/ibm_zos_core/pull/773) \ No newline at end of file diff --git a/changelogs/fragments/778-query-new-fields.yml b/changelogs/fragments/778-query-new-fields.yml deleted file mode 100644 index 9f2c71579..000000000 --- a/changelogs/fragments/778-query-new-fields.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: -- zos_job_query - Adds new fields job_class, svc_class, priority, asid, - creation_datetime, and queue_position to the return output when querying - or submitting a job. Available when using ZOAU v1.2.3 or greater. - (https://github.com/ansible-collections/ibm_zos_core/pull/778) diff --git a/changelogs/fragments/789-ac-command-add-test.yml b/changelogs/fragments/789-ac-command-add-test.yml deleted file mode 100644 index 56cae6936..000000000 --- a/changelogs/fragments/789-ac-command-add-test.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Adds support to run single test from test suite. - (https://github.com/ansible-collections/ibm_zos_core/pull/793) \ No newline at end of file diff --git a/changelogs/fragments/789-ac-command-updates.yml b/changelogs/fragments/789-ac-command-updates.yml deleted file mode 100644 index c0c60dcf1..000000000 --- a/changelogs/fragments/789-ac-command-updates.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Adds new mounts, targets and ansible 2.15 requirements.env. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/789) \ No newline at end of file diff --git a/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml b/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml deleted file mode 100644 index 3d29e906e..000000000 --- a/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml +++ /dev/null @@ -1,11 +0,0 @@ -bugfixes: -- zos_copy - Test case for recursive encoding directories reported a - UTF-8 failure. This change ensures proper test coverage for nested - directories and file permissions. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_copy - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_fetch - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). \ No newline at end of file diff --git a/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml b/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml deleted file mode 100644 index dd5b71220..000000000 --- a/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: - - zos_copy - Reported a false positive such that the response would have - `changed=true` when copying from a source (src) or destination (dest) - data set that was in use (DISP=SHR). This change now displays an appropriate - error message and returns `changed=false`. - (https://github.com/ansible-collections/ibm_zos_core/pull/794). 
\ No newline at end of file diff --git a/changelogs/fragments/795_overwrite_permissions_on_copy.yml b/changelogs/fragments/795_overwrite_permissions_on_copy.yml deleted file mode 100644 index 2a8d826d7..000000000 --- a/changelogs/fragments/795_overwrite_permissions_on_copy.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_copy - kept permissions on target directory when copy overwrote - files. The fix now set permissions when mode is given. - (https://github.com/ansible-collections/ibm_zos_core/pull/795) \ No newline at end of file diff --git a/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml b/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml deleted file mode 100644 index c4ad9901c..000000000 --- a/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml +++ /dev/null @@ -1,17 +0,0 @@ -trivial: -- zos_data_set - when a member is created by the module, the format is type - data which is not suitable for executables. This change describes the - format used when creating member. - (https://github.com/ansible-collections/ibm_zos_core/pull/791) -- ac - Reported an issue when functional tests ran leaving behind files. Fix - now removes the unwanted files. - (https://github.com/ansible-collections/ibm_zos_core/pull/791) -bugfixes: -- zos_data_set - Reported a failure caused when `present=absent` for a VSAM - data set leaving behind cluster components. Fix introduces a new logical - flow that will evaluate the volumes, compare it to the provided value and - if necessary catalog and delete. - (https://github.com/ansible-collections/ibm_zos_core/pull/791). -- module_utils - data_set.py - Reported a failure caused when cataloging a - VSAM data set. Fix now corrects how VSAM data sets are cataloged. - (https://github.com/ansible-collections/ibm_zos_core/pull/791). 
\ No newline at end of file diff --git a/changelogs/fragments/812-ansible-lint.yml b/changelogs/fragments/812-ansible-lint.yml deleted file mode 100644 index 0cb520884..000000000 --- a/changelogs/fragments/812-ansible-lint.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- ansible-lint - enabling ansible-lint for 2.15 and Ansible Automation Platform - certification. - (https://github.com/ansible-collections/ibm_zos_core/pull/812) diff --git a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml deleted file mode 100644 index 5b4e14aa8..000000000 --- a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: -- zos_copy - Module returned the dynamic values created with the same dataset type - and record format. Fix validate the correct dataset type and record format of - target created. - (https://github.com/ansible-collections/ibm_zos_core/pull/824) \ No newline at end of file diff --git a/changelogs/fragments/839-Add-Field-to-zos-job-query.yml b/changelogs/fragments/839-Add-Field-to-zos-job-query.yml deleted file mode 100644 index 52370356c..000000000 --- a/changelogs/fragments/839-Add-Field-to-zos-job-query.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: -- zos_job_query - zoau added 'program_name' to their field output - starting with v1.2.4. This enhancement checks for that version and passes the extra column through. - (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_submit - zoau added 'program_name' to their field output - starting with v1.2.4. This enhancement checks for that version and passes the extra column through. - (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_output - zoau added 'program_name' to their field output - starting with v1.2.4. 
This enhancement checks for that version and passes the extra column through. - (https://github.com/ansible-collections/ibm_zos_core/pull/841) diff --git a/changelogs/fragments/840-redesign-test-cases.yml b/changelogs/fragments/840-redesign-test-cases.yml deleted file mode 100644 index c998eeee4..000000000 --- a/changelogs/fragments/840-redesign-test-cases.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: -- zos_lininfile - Adjust test cases to be in one document and clearer to follow. -- zos_blockinfile - Adjust test cases to be in one document and clearer to follow. -bugfixes: -- zos_blockinfile - Test case generate a data set that was not correctly removed. - Changes delete the correct data set not only member. - (https://github.com/ansible-collections/ibm_zos_core/pull/840) \ No newline at end of file diff --git a/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml b/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml deleted file mode 100644 index 64ab4871c..000000000 --- a/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml +++ /dev/null @@ -1,6 +0,0 @@ -trivial: -- zos_tso_command - Test suite was set up to run sequentially such that - certain tests relied on prior test cases. The new changes combine those - inter-dependent test cases into a single test case so that each individual - test case can now be run stand-alone. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/895) \ No newline at end of file diff --git a/changelogs/fragments/906-update-doc-generation.yml b/changelogs/fragments/906-update-doc-generation.yml deleted file mode 100644 index f2e5ae316..000000000 --- a/changelogs/fragments/906-update-doc-generation.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- make - Current doc generation requires manual intervention, this change will - allow for doc generation without any manual intervention and removes warnings. - (https://github.com/ansible-collections/ibm_zos_core/pull/906) \ No newline at end of file diff --git a/changelogs/fragments/911-Improve-job-query-performance.yml b/changelogs/fragments/911-Improve-job-query-performance.yml deleted file mode 100644 index a6722636e..000000000 --- a/changelogs/fragments/911-Improve-job-query-performance.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. - This change removes those resulting in a performance increase in job related queries. - (https://github.com/ansible-collections/ibm_zos_core/pull/911) diff --git a/changelogs/fragments/v1.7.0-beta.1_summary.yml b/changelogs/fragments/v1.7.0-beta.1_summary.yml deleted file mode 100644 index 727e3da75..000000000 --- a/changelogs/fragments/v1.7.0-beta.1_summary.yml +++ /dev/null @@ -1,6 +0,0 @@ -release_summary: | - Release Date: '2023-07-26' - This changelog describes all changes made to the modules and plugins included - in this collection. The release date is the date the changelog is created. 
- For additional details such as required dependencies and availability review - the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From 23a06330eb7d4cb73669fe4637d3a8ae2218c2f8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 27 Jul 2023 23:53:49 -0700 Subject: [PATCH 156/413] Update test with string match Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_archive_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index e3b4b4ba7..8ac4f2e9d 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -545,7 +545,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - assert data_set.get("name") not in c_result.get("stdout") + assert data_set.get("name") != c_result.get("stdout") finally: hosts.all.zos_data_set(name=data_set.get("name"), state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") From c82ceee0034344d1b624b34524b94a48546de40e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 22:45:32 -0700 Subject: [PATCH 157/413] Unused changed variable, found by flake8 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_archive.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 0ace2b608..f00968d62 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -790,7 +790,8 @@ def create_dest_ds(self, name): name {str} - name of the newly created data set. 
""" record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + #changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) From 5edf8939ef8bea5d42d348bb91136811481b548d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 22:46:25 -0700 Subject: [PATCH 158/413] Unused 'normalize_line_endings' functiion found by flake8 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_copy.py | 47 ------------------------------------- 1 file changed, 47 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 02f71ab21..6b5e8ab7f 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2162,53 +2162,6 @@ def data_set_locked(dataset_name): return False -def normalize_line_endings(src, encoding=None): - """ - Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes - its line endings to LF. - - Arguments: - src (str) -- Path of a USS file. - encoding (dict, optional) -- Encoding options for the module. - - Returns: - str -- Path to the normalized file. - """ - # Before copying into a destination dataset, we'll make sure that - # the source file doesn't contain any carriage returns that would - # result in empty records in the destination. - # Due to the differences between encodings, we'll normalize to IBM-037 - # before checking the EOL sequence. 
- enc_utils = encode.EncodeUtils() - src_tag = enc_utils.uss_file_tag(src) - copy_handler = CopyHandler(AnsibleModuleHelper(dict())) - - if src_tag == "untagged": - # This should only be true when src is a remote file and no encoding - # was specified by the user. - if not encoding: - encoding = {"from": encode.Defaults.get_default_system_charset()} - src_tag = encoding["from"] - - if src_tag != "IBM-037": - fd, converted_src = tempfile.mkstemp() - os.close(fd) - - enc_utils.uss_convert_encoding( - src, - converted_src, - src_tag, - "IBM-037" - ) - copy_handler._tag_file_encoding(converted_src, "IBM-037") - src = converted_src - - if copy_handler.file_has_crlf_endings(src): - src = copy_handler.create_temp_with_lf_endings(src) - - return src - - def run_module(module, arg_def): # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError From 320caccc627332bd82a77e145c9eb62957732a15 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 22:56:16 -0700 Subject: [PATCH 159/413] Correct flake8, import 'path' from line 18 shadowed by loop variable Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/template.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 3f0c95021..308946da2 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -298,9 +298,9 @@ def render_dir_template(self, variables): to_native(err) )) - for path, subdirs, files in os.walk(self.template_dir): + for dirpath, subdirs, files in os.walk(self.template_dir): for template_file in files: - relative_dir = os.path.relpath(path, self.template_dir) + relative_dir = os.path.relpath(dirpath, self.template_dir) file_path = os.path.normpath(os.path.join(relative_dir, template_file)) try: From f2f41f53bed265561dfa2370df7c239bc93c1b0f Mon Sep 17 00:00:00 2001 
From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 23:03:18 -0700 Subject: [PATCH 160/413] Correct comment starting at a new line Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_archive.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index f00968d62..6b7fcbeb0 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -791,7 +791,7 @@ def create_dest_ds(self, name): """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) - #changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) From 2f8af15be818fa4ee439f84f143bd97098bdaf7e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 1 Aug 2023 09:39:27 -0700 Subject: [PATCH 161/413] Updated new script copyright year Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/scripts/post-zos_apf.sh | 2 +- docs/scripts/pre-doc-gen.sh | 2 +- docs/scripts/pre-template.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh index a74207e48..befcaecfe 100755 --- a/docs/scripts/post-zos_apf.sh +++ b/docs/scripts/post-zos_apf.sh @@ -1,7 +1,7 @@ #!/bin/sh ################################################################################ -# © Copyright IBM Corporation 2020 +# © Copyright IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/docs/scripts/pre-doc-gen.sh b/docs/scripts/pre-doc-gen.sh index 31e287c11..e2f4d362c 100755 --- a/docs/scripts/pre-doc-gen.sh +++ b/docs/scripts/pre-doc-gen.sh @@ -1,7 +1,7 @@ #!/bin/sh ################################################################################ -# © Copyright IBM Corporation 2020 +# © Copyright IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh index 8c627e0a5..ca35775d9 100755 --- a/docs/scripts/pre-template.sh +++ b/docs/scripts/pre-template.sh @@ -1,7 +1,7 @@ #!/bin/sh ################################################################################ -# © Copyright IBM Corporation 2020 +# © Copyright IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 36c11dfc8a1751db8470f6cedecfe6604f88ecd0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 1 Aug 2023 11:17:11 -0700 Subject: [PATCH 162/413] Fix release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index e512de025..948851218 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -29,6 +29,7 @@ Minor Changes - ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. 
- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - ``zos_job_query`` + - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - removes unnecessary queries to find DDs improving the modules performance. - ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). From d5b949e71eb5830cc9664d7c840cfc1b06f7b8f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 3 Aug 2023 16:22:00 -0600 Subject: [PATCH 163/413] Bugfix/583/zos lineinfile does not behave community (#916) * First iteration * Clean test apf_func * Add test case to validate change in ZOAU 1.2.4 * Change test case for the new change * Change zos_job_query test accord to ZOAU 1.2.4 * Restore test as dev * Return test to originals * Return job_query as original * Add fragment * Add test case for DS and change fragments * Solve check of testing for DS * Change logic of tests --- ...s-lineinfile-does-not-behave-community.yml | 4 ++ .../modules/test_zos_lineinfile_func.py | 56 ++++++++++++++++++- 2 files changed, 57 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml diff --git a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml new file mode 100644 index 000000000..c1639c769 --- /dev/null +++ b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml @@ -0,0 +1,4 @@ +bugfix: +- 
zos_lineinfile - A duplicate entry was made even if line was already present in the target file. + Fix now prevents a duplicate entry if the line already exists in the target file. + (https://github.com/ansible-collections/ibm_zos_core/pull/916) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 6a29c79b8..754316ff3 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -103,7 +103,7 @@ ZOAU_ROOT=/usr/lpp/zoautil/v100 export ZOAU_ROOT export _BPXK_AUTOCVT -export ZOAU_ROOT""" +export 'ZOAU_ROOT'""" EXPECTED_INSERTBEFORE_BOF="""# this is file is for setting env vars if [ -z STEPLIB ] && tty -s; @@ -310,7 +310,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): @pytest.mark.uss def test_uss_line_insertafter_eof(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -515,6 +515,28 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): finally: remove_uss_environment(ansible_zos_module) +@pytest.mark.uss +def test_uss_line_does_not_insert_repeated(ansible_zos_module): + hosts = ansible_zos_module + params = dict(path="", line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == TEST_CONTENT + 
# Run lineinfle module with same params again, ensure duplicate entry is not made into file + hosts.all.zos_lineinfile(**params) + results = hosts.all.shell(cmd="""grep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' {0} """.format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == '1' + finally: + remove_uss_environment(ansible_zos_module) ######################### # Dataset test cases @@ -573,7 +595,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): def test_ds_line_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") test_name = "DST3" temp_file = "/tmp/{0}".format(test_name) ds_name = test_name.upper() + "." + ds_type @@ -946,6 +968,34 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") +@pytest.mark.ds +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") + test_name = "DST15" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." 
+ ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == TEST_CONTENT + # Run lineinfle module with same params again, ensure duplicate entry is not made into file + hosts.all.zos_lineinfile(**params) + results = hosts.all.shell(cmd="""dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' "{0}" """.format(params["path"])) + response = params["path"] + " " + "1" + for result in results.contacted.values(): + assert result.get("stdout") == response + finally: + remove_ds_environment(ansible_zos_module, ds_name) + ######################### # Encoding tests ######################### From f77a9f238b997f1503f91a080b6817463744bd0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 3 Aug 2023 19:16:27 -0600 Subject: [PATCH 164/413] Deprecate debug=true in zos_blockinfile and set as_json=true (#904) * Change debug for as_json option * Add fragment --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> --- ..._debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml | 2 ++ plugins/modules/zos_blockinfile.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml diff --git a/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml b/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml new file mode 100644 index 000000000..9218a0ed3 --- /dev/null +++ 
b/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml @@ -0,0 +1,2 @@ +deprecated_features: + - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 7484d93ec..1751c6472 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -386,7 +386,7 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. """ - return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, debug=True, options=force) + return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, options=force, as_json=True) def absent(src, marker, encoding, force): @@ -402,7 +402,7 @@ def absent(src, marker, encoding, force): found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. 
""" - return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, debug=True, options=force) + return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, options=force, as_json=True) def quotedString(string): From 4e7983e6ccaba382fd39b11e09429074afa5ccfc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 4 Aug 2023 14:12:59 -0600 Subject: [PATCH 165/413] Add test case to validate response come back complete (#918) * Add test case to validate response come back complete * Add fragment --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> --- ...918-zos-operator-response-come-back-truncate.yaml | 4 ++++ tests/functional/modules/test_zos_operator_func.py | 12 ++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml new file mode 100644 index 000000000..ef5ae8b36 --- /dev/null +++ b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml @@ -0,0 +1,4 @@ +bugfix: +- zos_operator: The last line of the operator was missing in the response of the module. + Fix now ensures the presence of the full output of the operator. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index dbdb4f065..84f593f51 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -118,3 +118,15 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): assert result.get("content") is not None # Account for slower network assert result.get('elapsed') <= (2 * wait_time_s) + + +def test_response_come_back_complete(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_operator(cmd="\$dspl") + res = dict() + res["stdout"] = [] + for result in results.contacted.values(): + stdout = result.get('content') + # HASP646 Only appears in the last line that before did not appears + last_line = len(stdout) + assert "HASP646" in stdout[last_line - 1] \ No newline at end of file From b857fdfdc2111b0936cb76ddb25758ed5aa51444 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 17 Aug 2023 13:11:14 -0600 Subject: [PATCH 166/413] Remove conditional unnecessary (#934) * Remove conditional unecesary * Add fragment * Correct the conditional --- .../fragments/934-Remove-conditional-unnecessary.yml | 2 ++ tests/functional/modules/test_zos_blockinfile_func.py | 8 ++------ 2 files changed, 4 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/934-Remove-conditional-unnecessary.yml diff --git a/changelogs/fragments/934-Remove-conditional-unnecessary.yml b/changelogs/fragments/934-Remove-conditional-unnecessary.yml new file mode 100644 index 000000000..bf07c7f32 --- /dev/null +++ b/changelogs/fragments/934-Remove-conditional-unnecessary.yml @@ -0,0 +1,2 @@ +- trivial: + - zos_blockinfile - remove test conditional unnecessary 
(https://github.com/ansible-collections/ibm_zos_core/pull/934). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 23982aeec..226f34477 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1218,12 +1218,8 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - if backup_name: - backup_ds_name = result.get("backup_name") - assert backup_ds_name is not None - else: - backup_ds_name = result.get("backup_name") - assert backup_ds_name is not None + backup_ds_name = result.get("backup_name") + assert backup_ds_name is not None results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF From 5d43c7c4f9ad308797177016a418ab21be94cf65 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 28 Aug 2023 12:32:34 -0600 Subject: [PATCH 167/413] v1.7.0 beta.2 into dev (#953) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. 
* Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' 
Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * 
Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. 
job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use 
typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. 
This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV 
else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: 
ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to 
allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan… * Enhancement/866 archive (#930) * Added action plugin zos_unarchive * Added zos_archive 
changes * Added zos_unarchive changes * Added zos_archive tests changes * Added test zos_unarchive changes * Added zos_archive changes * fixed pep8 issues * Changed source to src in docs * Added correct copyright year * Updated docs * Added changelog fragments * Updated docs * Updated galaxy.yml * Updated meta * Updated docs * Added zos_gather_facts rst * Added changelog * Added release notes * Changed variable name to avoid shadowing import * Delete 930-archive-post-beta.yml * Delete v1.7.0-beta.2_summary.yml * Resolve conflicts --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- CHANGELOG.rst | 25 +++ changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 28 +++ docs/source/modules/zos_archive.rst | 16 +- docs/source/modules/zos_gather_facts.rst | 5 + docs/source/release_notes.rst | 34 +++- galaxy.yml | 4 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_unarchive.py | 38 ++-- plugins/modules/zos_archive.py | 188 +++++++++++++----- .../modules/test_zos_archive_func.py | 15 +- .../modules/test_zos_unarchive_func.py | 30 ++- 12 files changed, 297 insertions(+), 90 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 826161e56..c6b3a91e0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,31 @@ ibm.ibm_zos_core Release Notes .. 
contents:: Topics +v1.7.0-beta.2 +============= + +Release Summary +--------------- + +Release Date: '2023-08-21' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + +Bugfixes +-------- + +- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). 
+ v1.7.0-beta.1 ============= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 3520dc55a..c07ea8e62 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -126,4 +126,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.7.0-beta.1 +version: 1.7.0-beta.2 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 5f4da9de0..753c8e318 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -981,3 +981,31 @@ releases: name: zos_unarchive namespace: '' release_date: '2023-07-26' + 1.7.0-beta.2: + changes: + bugfixes: + - zos_archive - Module did not return the proper src state after archiving. + Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + minor_changes: + - zos_archive - If destination data set space is not provided then the module + computes it based on the src list and/or expanded src list based on pattern + provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_archive - When xmit faces a space error in xmit operation because of dest + or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_unarchive - When copying to remote fails now a proper error message is + displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_unarchive - When copying to remote if space_primary is not defined, then + is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + release_summary: 'Release Date: ''2023-08-21'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 930-archive-post-beta.yml + - v1.7.0-beta.2_summary.yml + release_date: '2023-08-21' diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 221de41ec..03eaafbae 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -112,14 +112,20 @@ dest If *dest* is a nonexistent USS file, it will be created. + If *dest* is an existing file or data set and *force=true*, the existing *dest* will be deleted and recreated with attributes defined in the *dest_data_set* option or computed by the module. + + If *dest* is an existing file or data set and *force=false* or not specified, the module exits with a note to the user. + Destination data set attributes can be set using *dest_data_set*. + Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the *dest_data_set* option will improve performance. + | **required**: True | **type**: str exclude - Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. + Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from src list and glob expansion. Patterns (wildcards) can contain one of the following, `?`, `*`. @@ -152,7 +158,7 @@ mode The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - *mode=preserve* means that the file will be given the same permissions as the source file. 
+ *mode=preserve* means that the file will be given the same permissions as the src file. | **required**: False | **type**: str @@ -170,7 +176,7 @@ owner remove - Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *path*. + Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *src*. | **required**: False | **type**: bool @@ -387,6 +393,8 @@ Notes When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. + See Also @@ -425,7 +433,7 @@ dest_state ``compress`` when the file is compressed, but not an archive. - ``incomplete`` when the file is an archive, but some files under *path* were not found. + ``incomplete`` when the file is an archive, but some files under *src* were not found. | **returned**: success | **type**: str diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 63bd22701..232cc26ba 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -22,6 +22,11 @@ Synopsis - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. +Requirements +------------ + +- ZOAU 1.2.1 or later. 
+ diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 948851218..9a7bdb059 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,39 @@ Releases ======== +Version 1.7.0-beta.2 +==================== + +Minor Changes +------------- +- ``zos_archive`` + + - When xmit faces a space error in xmit operation because of dest or log data set being filled raises an appropriate error hint. + - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. + +- ``zos_unarchive`` + + - When copying to remote fails now a proper error message is displayed. + - When copying to remote if space_primary is not defined, then is defaulted to 5M. + +Bugfixes +-------- +- ``zos_archive`` - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. + Version 1.7.0-beta.1 ==================== @@ -29,7 +62,6 @@ Minor Changes - ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. - ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - ``zos_job_query`` - - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). 
- removes unnecessary queries to find DDs improving the modules performance. - ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). diff --git a/galaxy.yml b/galaxy.yml index 87f10f272..b1090564c 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.7.0-beta.1 +version: 1.7.0-beta.2 # Collection README file readme: README.md @@ -19,7 +19,7 @@ authors: - Ivan Moreno <ivan.moreno.soto@ibm.com> - Oscar Fernando Flores Garcia <fernando.flores@ibm.com> - Jenny Huang <jennyhuang@ibm.com> - - Marcel Guitierrez <andre.marcel.gutierrez@ibm.com> + - Marcel Gutierrez <andre.marcel.gutierrez@ibm.com> # Description description: The IBM z/OS core collection includes connection plugins, action plugins, modules, filters and ansible-doc to automate tasks on z/OS. diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 5e265309f..f659df786 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.7.0-beta.1" +version: "1.7.0-beta.2" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 7c310a4a3..19cbf5ead 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -61,6 +61,8 @@ def run(self, tmp=None, task_vars=None): format_name = format.get("name") copy_module_args = dict() dest_data_set = format.get("dest_data_set") + if dest_data_set is None: + dest_data_set = dict() dest = "" if source.startswith('~'): source = os.path.expanduser(source) @@ -80,14 +82,12 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars, ) dest = cmd_res.get("stdout") - if dest_data_set is None: - if format_name == 'terse': - dest_data_set = dict(type='SEQ', 
record_format='FB', record_length=1024) - if format_name == 'xmit': - dest_data_set = dict(type='SEQ', record_format='FB', record_length=80) - else: - # Raise unsupported format name - None + if dest_data_set.get("space_primary") is None: + dest_data_set.update(space_primary=5, space_type="M") + if format_name == 'terse': + dest_data_set.update(type='SEQ', record_format='FB', record_length=1024) + if format_name == 'xmit': + dest_data_set.update(type='SEQ', record_format='FB', record_length=80) copy_module_args.update( dict( @@ -107,15 +107,17 @@ def run(self, tmp=None, task_vars=None): templar=self._templar, shared_loader_obj=self._shared_loader_obj) result.update(zos_copy_action_module.run(task_vars=task_vars)) - - module_args["src"] = dest - display.vvv(u"Copy args {0}".format(result), host=self._play_context.remote_addr) - - result.update( - self._execute_module( - module_name="ibm.ibm_zos_core.zos_unarchive", - module_args=module_args, - task_vars=task_vars, + display.vvv(u"Copy result {0}".format(result), host=self._play_context.remote_addr) + if result.get("msg") is None: + module_args["src"] = dest + + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_unarchive", + module_args=module_args, + task_vars=task_vars, + ) ) - ) + else: + result.update(dict(failed=True)) return result diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 6b7fcbeb0..c48fd767e 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -109,13 +109,22 @@ - I(dest) can be a USS file or MVS data set name. - If I(dest) has missing parent directories, they will be created. - If I(dest) is a nonexistent USS file, it will be created. + - If I(dest) is an existing file or data set and I(force=true), + the existing I(dest) will be deleted and recreated with attributes + defined in the I(dest_data_set) option or computed by the module. 
+ - If I(dest) is an existing file or data set and I(force=false) or not + specified, the module exits with a note to the user. - Destination data set attributes can be set using I(dest_data_set). + - Destination data set space will be calculated based on space of + source data sets provided and/or found by expanding the pattern name. + Calculating space can impact module performance. Specifying space attributes + in the I(dest_data_set) option will improve performance. type: str required: true exclude: description: - Remote absolute path, glob, or list of paths, globs or data set name - patterns for the file, files or data sets to exclude from path list + patterns for the file, files or data sets to exclude from src list and glob expansion. - "Patterns (wildcards) can contain one of the following, `?`, `*`." - "* matches everything." @@ -148,7 +157,7 @@ (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - I(mode=preserve) means that the file will be given the same permissions - as the source file. + as the src file. type: str required: false owner: @@ -164,7 +173,7 @@ description: - Remove any added source files , trees or data sets after module L(zos_archive,./zos_archive.html) adds them to the archive. - Source files, trees and data sets are identified with option I(path). + Source files, trees and data sets are identified with option I(src). type: bool required: false default: false @@ -301,6 +310,10 @@ respectively. - When packing and using C(use_adrdssu) flag the module will take up to two times the space indicated in C(dest_data_set). + - tar, zip, bz2 and pax are archived using python C(tarfile) library which + uses the latest version available for each format, for compatibility when + opening from system make sure to use the latest available version for the + intended format. seealso: @@ -373,7 +386,7 @@ - C(archive) when the file is an archive. - C(compress) when the file is compressed, but not an archive. 
- C(incomplete) when the file is an archive, but some files under - I(path) were not found. + I(src) were not found. type: str returned: success missing: @@ -403,6 +416,7 @@ ''' from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_bytes from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, @@ -416,6 +430,8 @@ import abc import glob import re +import math +from hashlib import sha256 try: @@ -427,6 +443,7 @@ AMATERSE_RECORD_LENGTH = 1024 STATE_ABSENT = 'absent' +STATE_PRESENT = 'present' STATE_ARCHIVE = 'archive' STATE_COMPRESSED = 'compressed' STATE_INCOMPLETE = 'incomplete' @@ -488,6 +505,8 @@ def __init__(self, module): self.expanded_sources = "" self.expanded_exclude_sources = "" self.dest_state = STATE_ABSENT + self.state = STATE_PRESENT + self.xmit_log_data_set = "" def targets_exist(self): return bool(self.targets) @@ -509,7 +528,7 @@ def find_targets(self): pass @abc.abstractmethod - def _get_checksums(self, path): + def _get_checksums(self, src): pass @abc.abstractmethod @@ -524,17 +543,23 @@ def is_different_from_original(self): def remove_targets(self): pass + @abc.abstractmethod + def compute_dest_size(self): + pass + @property def result(self): return { 'archived': self.archived, 'dest': self.dest, + 'state': self.state, 'arcroot': self.arcroot, 'dest_state': self.dest_state, 'changed': self.changed, 'missing': self.not_found, 'expanded_sources': list(self.expanded_sources), 'expanded_exclude_sources': list(self.expanded_exclude_sources), + 'xmit_log_data_set': self.xmit_log_data_set, } @@ -569,11 +594,29 @@ def find_targets(self): else: self.not_found.append(path) - def _get_checksums(self, path): - md5_cmd = "md5 -r \"{0}\"".format(path) - rc, out, err = self.module.run_command(md5_cmd) - checksums = out.split(" ")[0] - return checksums + def _get_checksums(self, src): + """Calculate SHA256 hash for a given file + + Arguments: + src {str} -- The absolute path of 
the file + + Returns: + str -- The SHA256 hash of the contents of input file + """ + b_src = to_bytes(src) + if not os.path.exists(b_src) or os.path.isdir(b_src): + return None + blksize = 64 * 1024 + hash_digest = sha256() + try: + with open(to_bytes(src, errors="surrogate_or_strict"), "rb") as infile: + block = infile.read(blksize) + while block: + hash_digest.update(block) + block = infile.read(blksize) + except Exception: + raise + return hash_digest.hexdigest() def dest_checksums(self): if self.dest_exists(): @@ -586,11 +629,18 @@ def is_different_from_original(self): return True def remove_targets(self): + self.state = STATE_ABSENT for target in self.archived: if os.path.isdir(target): - os.removedirs(target) + try: + os.removedirs(target) + except Exception: + self.state = STATE_INCOMPLETE else: - os.remove(target) + try: + os.remove(target) + except PermissionError: + self.state = STATE_INCOMPLETE def archive_targets(self): self.file = self.open(self.dest) @@ -699,34 +749,6 @@ def find_targets(self): else: self.not_found.append(path) - def _compute_dest_data_set_size(self): - """ - Computes the attributes that the destination data set or temporary destination - data set should have in terms of size, record_length, etc. - """ - - """ - - Size of temporary DS for archive handling. - - If remote_src then we can get the source_size from archive on the system. - - If not remote_src then we can get the source_size from temporary_ds. - Both are named src so no problemo. - - If format is xmit, dest_data_set size is the same as source_size. - - If format is terse, dest_data_set size is different than the source_size, has to be greater, - but how much? In this case we can add dest_data_set option. - - Apparently the only problem is when format name is terse. 
- """ - - # Get the size from the system - default_size = 5 - dest_space_type = 'M' - dest_primary_space = int(default_size) - return dest_primary_space, dest_space_type - def _create_dest_data_set( self, name=None, @@ -833,11 +855,13 @@ def dump_into_temp_ds(self, temp_ds): ) return rc - def _get_checksums(self, path): - md5_cmd = "md5 -r \"//'{0}'\"".format(path) - rc, out, err = self.module.run_command(md5_cmd) - checksums = out.split(" ")[0] - return checksums + def _get_checksums(self, src): + sha256_cmd = "sha256 \"//'{0}'\"".format(src) + rc, out, err = self.module.run_command(sha256_cmd) + checksums = out.split("= ") + if len(checksums) > 0: + return checksums[1] + return None def dest_checksums(self): if self.dest_exists(): @@ -856,8 +880,14 @@ def dest_exists(self): return data_set.DataSet.data_set_exists(self.dest) def remove_targets(self): + self.state = STATE_ABSENT for target in self.archived: - data_set.DataSet.ensure_absent(target) + try: + changed = data_set.DataSet.ensure_absent(target) + except Exception: + self.state = STATE_INCOMPLETE + if not changed: + self.state = STATE_INCOMPLETE return def expand_mvs_paths(self, paths): @@ -892,10 +922,30 @@ def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False data_set.DataSet.ensure_absent(ds) if uss_files is not None: for file in uss_files: - os.remove(file) + try: + os.remove(file) + except PermissionError: + self.state = STATE_INCOMPLETE if remove_targets: + self.remove_targets() + + def compute_dest_size(self): + """ + Calculate the destination data set based on targets found. + Arguments: + + Returns: + {int} - Destination computed space in kilobytes. 
+ """ + if self.dest_data_set.get("space_primary") is None: + dest_space = 0 for target in self.targets: - data_set.DataSet.ensure_absent(target) + data_sets = datasets.listing(target) + for ds in data_sets: + dest_space += int(ds.to_dict().get("total_space")) + # space unit returned from listings is bytes + dest_space = math.ceil(dest_space / 1024) + self.dest_data_set.update(space_primary=dest_space, space_type="K") class AMATerseArchive(MVSArchive): @@ -972,15 +1022,20 @@ def add(self, src, archive): archive: {str} """ log_option = "LOGDSNAME({0})".format(self.xmit_log_data_set) if self.xmit_log_data_set else "NOLOG" - xmit_cmd = """ XMIT A.B - + xmit_cmd = """ + PROFILE NOPREFIX + XMIT A.B - FILE(SYSUT1) OUTFILE(SYSUT2) - {0} - """.format(log_option) dds = {"SYSUT1": "{0},shr".format(src), "SYSUT2": archive} rc, out, err = mvs_cmd.ikjeft01(cmd=xmit_cmd, authorized=True, dds=dds) if rc != 0: + # self.get_error_hint handles the raw output of XMIT executed through TSO, contains different + # error hints based on the abend code returned. + error_hint = self.get_error_hint(out) self.module.fail_json( - msg="An error occurred while executing 'TSO XMIT' to archive {0} into {1}".format(src, archive), + msg="An error occurred while executing 'TSO XMIT' to archive {0} into {1}.{2}".format(src, archive, error_hint), stdout=out, stderr=err, rc=rc, @@ -1022,6 +1077,38 @@ def archive_targets(self): self.add(source, dest) self.clean_environment(data_sets=self.tmp_data_sets) + def get_error_hint(self, output): + """ + Takes a raw TSO XMIT output and parses the abend code and return code to provide an + appropriate error hint for the failure. + If parsing is not possible then return an empty string. + + Arguments: + output (str): Raw TSO XMIT output returned from ikjeft01 when the command fails. + """ + error_messages = dict(D37={"00000004": "There appears to be a space issue. 
Ensure that there is adequate space and log data sets are not full."}) + + sys_abend, reason_code, error_hint = "", "", "" + find_abend = re.findall(r"ABEND CODE.*REASON", output) + if find_abend: + try: + sys_abend = find_abend[0].split("ABEND CODE ")[1].split(" ")[0] + except IndexError: + return "" + + find_reason_code = re.findall(r"REASON CODE.*", output) + if find_reason_code: + try: + reason_code = find_reason_code[0].split("REASON CODE ")[1].split(" ")[0] + except IndexError: + return "" + + msg = "Operation failed with abend code {0} and reason code {1}. {2}" + if sys_abend in error_messages: + if reason_code in error_messages[sys_abend]: + error_hint = error_messages[sys_abend][reason_code] + return msg.format(sys_abend, reason_code, error_hint) + def run_module(): module = AnsibleModule( @@ -1196,6 +1283,7 @@ def run_module(): archive.find_targets() if archive.targets_exist(): + archive.compute_dest_size() archive.archive_targets() if archive.remove: archive.remove_targets() diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 8ac4f2e9d..9d92134e5 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -344,10 +344,9 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( - # "record_format", ["FB", "VB", "FBA", "VBA", "U"], "record_format", ["FB", "VB",], ) def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): @@ -417,10 +416,9 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( - # "record_format", ["FB", "VB", "FBA", "VBA", "U"], "record_format", ["FB", "VB",], ) def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): @@ -492,10 +490,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data @pytest.mark.parametrize( "record_length", [80], ) -@pytest.mark.parametrize( - "record_format", ["FB", "VB",], -) -def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length, record_format): +def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length): try: hosts = ansible_zos_module # Clean env @@ -507,7 +502,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d type=data_set.get("dstype"), state="present", record_length=record_length, - record_format=record_format, + record_format="FB", replace=True, ) # Create members if needed diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index a4bf5e007..831724f21 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 
2022 +# Copyright (c) IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -415,7 +415,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( "record_format", ["FB", "VB",], @@ -902,7 +902,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( "record_format", ["FB", "VB",], @@ -986,3 +986,27 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") tmp_folder.cleanup() + +def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): + try: + hosts = ansible_zos_module + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + # False path + source_path = "/tmp/OMVSADM.NULL" + + format_dict = dict(name='terse') + format_dict["format_options"] = dict(use_adrdssu=True) + + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=source_path, + format=format_dict, + remote_src=False, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("changed") is False + assert result.get("failed", False) is True + print(result) + finally: + tmp_folder.cleanup() From 9d6704daa5ab73623d63d84a19f906cae01d5db1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 29 Aug 2023 10:19:10 -0600 Subject: [PATCH 168/413] Enhancement, improve load module and program object support in zos_copy (#804) * Add is_executable as option * Changes lines on zos_copy to cover as many cases as possible * Delete unecesary print 
function and solve issue of definition * Add correct dataset type and record created * Ignore the sequetial that create errors * Delete unnrelated cases of executable copy * Cover cases of datasets memebers and USS * Remove the SEQ not supported and support all USS exe files * Test case to cover USS copy, delte unecesary print and add fragment * Solve long line and Unexpected spaces * Solve long line and Unexpected spaces * Remove other solution of bugfix * Solve details in zo * Remove is_ * Add test case to ensure behaviour for uss cases * Solve references of jcl calls * Change description option and test * Remove line in test module and add case of executable for is_compatible function * Solve identation problems * Solve ds backup assignation * Change requesteds * Remove white spaces * Add assignation * Change documents and ensrues proper work with objects in members already created * Remove spaces * Correct conditional * Check the size of correct way * Change corrections --------- Co-authored-by: Demetri <dimatos@gmail.com> --- ...load_module_and_program_object_support.yml | 6 + plugins/action/zos_copy.py | 1 + plugins/module_utils/copy.py | 2 +- plugins/modules/zos_copy.py | 143 ++++++++++++---- .../functional/modules/test_zos_copy_func.py | 154 +++++++++++++++--- 5 files changed, 253 insertions(+), 53 deletions(-) create mode 100644 changelogs/fragments/804-improved_load_module_and_program_object_support.yml diff --git a/changelogs/fragments/804-improved_load_module_and_program_object_support.yml b/changelogs/fragments/804-improved_load_module_and_program_object_support.yml new file mode 100644 index 000000000..07379c1e3 --- /dev/null +++ b/changelogs/fragments/804-improved_load_module_and_program_object_support.yml @@ -0,0 +1,6 @@ +minor_changes: +- zos_copy - includes a new option `executable` that enables copying of executables such + as load modules or program objects to both USS and partitioned data sets. 
When + the `dest` option contains a non-existent data set, `zos_copy` will create a data set with + the appropriate attributes for an executable. + (https://github.com/ansible-collections/ibm_zos_core/pull/804) \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index b557e8605..c6273132c 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -59,6 +59,7 @@ def run(self, tmp=None, task_vars=None): local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) is_binary = _process_boolean(task_args.get('is_binary'), default=False) + executable = _process_boolean(task_args.get('executable'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) backup_name = task_args.get("backup_name", None) encoding = task_args.get("encoding", None) diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index b4ebaacc7..7edd8a49c 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2019-2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6b5e8ab7f..c50fe8c64 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -77,12 +77,13 @@ - If C(dest) is a nonexistent USS file, it will be created. - If C(dest) is a nonexistent data set, it will be created following the process outlined here and in the C(volume) option. - - If C(dest) is a nonexistent data set, the attributes assigned will depend - on the type of C(src). 
If C(src) is a USS file, C(dest) will have a - Fixed Block (FB) record format and the remaining attributes will be computed. - If C(src) is binary, C(dest) will have a Fixed Block (FB) record format - with a record length of 80, block size of 32760, and the remaining - attributes will be computed. + - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of + C(src). If C(src) is a USS file, C(dest) will have a Fixed Block (FB) record format and the + remaining attributes will be computed. If I(is_binary=true), C(dest) will have a Fixed Block + (FB) record format with a record length of 80, block size of 32760, and the remaining + attributes will be computed. If I(executable=true),C(dest) will have an Undefined (U) record + format with a record length of 0, block size of 32760, and the remaining attributes will be + computed. - When C(dest) is a data set, precedence rules apply. If C(dest_data_set) is set, this will take precedence over an existing data set. If C(dest) is an empty data set, the empty data set will be written with the @@ -172,6 +173,20 @@ type: bool default: false required: false + executable: + description: + - If set to C(true), indicates that the file or library to be copied is an executable. + - If the C(src) executable has an alias, the alias information is also copied. If the + C(dest) is Unix, the alias is not visible in Unix, even though the information is there and + will be visible if copied to a library. + - If I(executable=true), and C(dest) is a data set, it must be a PDS or PDSE (library). + - If C(dest) is a nonexistent data set, the library attributes assigned will be + Undefined (U) record format with a record length of 0, block size of 32760 and the + remaining attributes will be computed. + - If C(dest) is a file, execute permission for the user will be added to the file (``u+x``). 
+ type: bool + default: false + required: false local_follow: description: - This flag indicates that any existing filesystem links in the source tree @@ -386,6 +401,11 @@ transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member + into a PDSE that contains program objects. You can control this behavior using module option + executable that will signify an executable is being copied into a PDSE with other + executables. Mixing data type members with program objects will be responded with a + (FSUM8976,./zos_copy.html) error. seealso: - module: zos_fetch - module: zos_data_set @@ -553,6 +573,13 @@ space_type: K record_format: VB record_length: 150 + +- name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. + zos_copy: + src: HLQ.COBOLSRC.PDSE(TESTPGM) + dest: HLQ.NEW.PDSE(MYCOBOL) + remote_src: true + executable: true """ RETURN = r""" @@ -750,6 +777,7 @@ def __init__( self, module, is_binary=False, + executable=False, backup_name=None ): """Utility class to handle copying data between two targets @@ -761,11 +789,14 @@ def __init__( Keyword Arguments: is_binary {bool} -- Whether the file or data set to be copied contains binary data + executable {bool} -- Whether the file or data set to be copied + is executable backup_name {str} -- The USS path or data set name of destination backup """ self.module = module self.is_binary = is_binary + self.executable = executable self.backup_name = backup_name def run_command(self, cmd, **kwargs): @@ -1037,6 +1068,7 @@ def __init__( self, module, is_binary=False, + executable=False, common_file_args=None, backup_name=None, ): @@ -1054,7 +1086,7 @@ def __init__( backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( - module, is_binary=is_binary, 
backup_name=backup_name + module, is_binary=is_binary, executable=executable, backup_name=backup_name ) self.common_file_args = common_file_args @@ -1089,6 +1121,9 @@ def copy_to_uss( self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) + if self.executable: + status = os.stat(dest) + os.chmod(dest, status.st_mode | stat.S_IEXEC) else: norm_dest = os.path.normpath(dest) dest_parent_dir, tail = os.path.split(norm_dest) @@ -1157,6 +1192,9 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): copy.copy_uss2uss_binary(new_src, dest) else: shutil.copy(new_src, dest) + if self.executable: + status = os.stat(dest) + os.chmod(dest, status.st_mode | stat.S_IEXEC) except OSError as err: raise CopyOperationError( msg="Destination {0} is not writable".format(dest), @@ -1331,9 +1369,16 @@ def _mvs_copy_to_uss( os.mkdir(dest) except FileExistsError: pass + opts = dict() + if self.executable: + opts["options"] = "-IX" + try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: - response = datasets._copy(src, dest) + if self.executable: + response = datasets._copy(src, dest, None, **opts) + else: + response = datasets._copy(src, dest) if response.rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), @@ -1352,6 +1397,7 @@ def __init__( self, module, is_binary=False, + executable=False, backup_name=None ): """ Utility class to handle copying to partitioned data sets or @@ -1369,6 +1415,7 @@ def __init__( super().__init__( module, is_binary=is_binary, + executable=executable, backup_name=backup_name ) @@ -1404,6 +1451,7 @@ def copy_to_pdse( dest_members = [] if src_ds_type == "USS": + if os.path.isfile(new_src): path = os.path.dirname(new_src) files = [os.path.basename(new_src)] @@ -1411,7 +1459,7 @@ def copy_to_pdse( path, dirs, files = next(os.walk(new_src)) src_members = [ - os.path.normpath("{0}/{1}".format(path, file)) if self.is_binary + os.path.normpath("{0}/{1}".format(path, file)) if 
(self.is_binary or self.executable) else normalize_line_endings("{0}/{1}".format(path, file), encoding) for file in files ] @@ -1493,21 +1541,12 @@ def copy_to_member( if self.is_binary: opts["options"] = "-B" + if self.executable: + opts["options"] = "-IX" + response = datasets._copy(src, dest, None, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response - if rc != 0: - # ***************************************************************** - # An error occurs while attempting to write a data set member to a - # PDSE containing program object members, a PDSE cannot contain - # both program object members and data members. This can be - # resolved by copying the program object with a "-X" flag. - # ***************************************************************** - if ("FSUM8976" in err and "EDC5091I" in err) or ("FSUM8976" in out and "EDC5091I" in out): - opts["options"] = "-X" - response = datasets._copy(src, dest, None, **opts) - rc, out, err = response.rc, response.stdout_response, response.stderr_response - return dict( rc=rc, out=out, @@ -1710,7 +1749,8 @@ def is_compatible( copy_member, src_member, is_src_dir, - is_src_inline + is_src_inline, + executable ): """Determine whether the src and dest are compatible and src can be copied to dest. @@ -1722,6 +1762,7 @@ def is_compatible( src_member {bool} -- Whether src is a data set member. is_src_dir {bool} -- Whether the src is a USS directory. is_src_inline {bool} -- Whether the src comes from inline content. + executable {bool} -- Whether the src is a executable to be copied. Returns: {bool} -- Whether src can be copied to dest. @@ -1733,6 +1774,14 @@ def is_compatible( if dest_type is None: return True + # ******************************************************************** + # If source or destination is a sequential data set and executable as true + # is incompatible to execute the copy. 
+ # ******************************************************************** + if executable: + if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: + return False + # ******************************************************************** # If source is a sequential data set, then destination must be # partitioned data set member, other sequential data sets or USS files. @@ -1968,6 +2017,7 @@ def allocate_destination_data_set( dest_exists, force, is_binary, + executable, dest_data_set=None, volume=None ): @@ -1983,6 +2033,7 @@ def allocate_destination_data_set( dest_exists (bool) -- Whether the destination data set already exists. force (bool) -- Whether to replace an existent data set. is_binary (bool) -- Whether the data set will contain binary data. + executable (bool) -- Whether the data to copy is an executable dataset or file. dest_data_set (dict, optional) -- Parameters containing a full definition of the new data set; they will take precedence over any other allocation logic. volume (str, optional) -- Volume where the data set should be allocated into. @@ -2007,6 +2058,7 @@ def allocate_destination_data_set( return False, dest_params # Giving more priority to the parameters given by the user. + # Cover case the user set executable to true to create dataset valid. if dest_data_set: dest_params = dest_data_set dest_params["name"] = dest @@ -2033,33 +2085,59 @@ def allocate_destination_data_set( elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED and not dest_exists: # Taking the src as model if it's also a PDSE. 
if src_ds_type in data_set.DataSet.MVS_PARTITIONED: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + if executable: + src_attributes = datasets.listing(src_name)[0] + size = int(src_attributes.total_space) + record_format = "U" + record_length = 0 + + dest_params = get_data_set_attributes( + dest, + size, + is_binary, + record_format=record_format, + record_length=record_length, + type="LIBRARY", + volume=volume + ) + data_set.DataSet.ensure_present(replace=force, **dest_params) + else: + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.listing(src_name)[0] # The size returned by listing is in bytes. size = int(src_attributes.total_space) record_format = src_attributes.recfm record_length = int(src_attributes.lrecl) - - dest_params = get_data_set_attributes(dest, size, is_binary, record_format=record_format, record_length=record_length, type="PDSE", volume=volume) + dest_params = get_data_set_attributes(dest, size, is_binary, record_format=record_format, record_length=record_length, type="PDSE", + volume=volume) data_set.DataSet.ensure_present(replace=force, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. 
size = os.stat(src).st_size record_format = record_length = None + type_ds = "PDSE" - if not is_binary: + if is_binary: + record_format = "FB" + record_length = 80 + else: record_format = "FB" record_length = get_file_record_length(src) + if executable: + record_format = "U" + record_length = 0 + type_ds = "LIBRARY" + dest_params = get_data_set_attributes( dest, size, is_binary, record_format=record_format, record_length=record_length, - type="PDSE", + type=type_ds, volume=volume ) else: @@ -2182,6 +2260,7 @@ def run_module(module, arg_def): dest = module.params.get('dest') remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') + executable = module.params.get('executable') backup = module.params.get('backup') backup_name = module.params.get('backup_name') validate = module.params.get('validate') @@ -2362,7 +2441,8 @@ def run_module(module, arg_def): copy_member, src_member, is_src_dir, - (src_ds_type == "USS" and src is None) + (src_ds_type == "USS" and src is None), + executable ): module.fail_json( msg="Incompatible target type '{0}' for source '{1}'".format( @@ -2465,6 +2545,7 @@ def run_module(module, arg_def): dest_exists, force, is_binary, + executable, dest_data_set=dest_data_set, volume=volume ) @@ -2492,6 +2573,7 @@ def run_module(module, arg_def): copy_handler = CopyHandler( module, is_binary=is_binary, + executable=executable, backup_name=backup_name ) @@ -2510,6 +2592,7 @@ def run_module(module, arg_def): uss_copy_handler = USSCopyHandler( module, is_binary=is_binary, + executable=executable, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, ) @@ -2573,7 +2656,7 @@ def run_module(module, arg_def): temp_path = os.path.join(temp_path, os.path.basename(src)) pdse_copy_handler = PDSECopyHandler( - module, is_binary=is_binary, backup_name=backup_name + module, is_binary=is_binary, executable=executable, backup_name=backup_name ) pdse_copy_handler.copy_to_pdse( @@ -2618,6 +2701,7 @@ def 
main(): src=dict(type='path'), dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), + executable=dict(type='bool', default=False), encoding=dict( type='dict', required=False, @@ -2718,6 +2802,7 @@ def main(): src=dict(arg_type='data_set_or_path', required=False), dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), + executable=dict(arg_type='bool', required=False, default=False), content=dict(arg_type='str', required=False), backup=dict(arg_type='bool', default=False, required=False), backup_name=dict(arg_type='data_set_or_path', required=False), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 5604527a3..dd0114fae 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -150,6 +150,23 @@ """ +hello_world = """#include <stdio.h> +int main() +{ + printf("Hello World!"); + return 0; +} +""" + +call_c_hello_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/c/hello_world +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + c_pgm="""#include <stdio.h> #include <stdlib.h> #include <string.h> @@ -298,8 +315,6 @@ def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): dest="/tmp/link.jcl", force=True, ) - for res in cp_res.contacted.values(): - print("copy link program result {0}".format(res)) # Link the temp ds with ds_name job_result = hosts.all.zos_job_submit( src="/tmp/link.jcl", @@ -307,7 +322,7 @@ def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): wait_time_s=60 ) for result in job_result.contacted.values(): - print("link job submit result {0}".format(result)) + #print("link job submit result {0}".format(result)) rc = result.get("jobs")[0].get("ret_code").get("code") finally: hosts.all.file(path=temp_jcl, state="absent") @@ -1551,8 +1566,7 @@ def 
test_copy_dest_lock(ansible_zos_module): results = hosts.all.zos_copy( src = DATASET_2 + "({0})".format(MEMBER_1), dest = DATASET_1 + "({0})".format(MEMBER_1), - remote_src = True, - force = True + remote_src = True ) for result in results.contacted.values(): print(result) @@ -2493,13 +2507,15 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): @pytest.mark.pdse -def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): +@pytest.mark.parametrize("is_created", ["true", "false"]) +def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_created): hosts = ansible_zos_module # The volume for this dataset should use a system symbol. # This dataset and member should be available on any z/OS system. src = "USER.LOAD.SRC" dest = "USER.LOAD.DEST" cobol_pds = "USER.COBOL.SRC" + uss_dest = "/tmp/HELLO" try: hosts.all.zos_data_set( name=src, @@ -2512,18 +2528,18 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): space_type="M", replace=True ) - - hosts.all.zos_data_set( - name=dest, - state="present", - type="pdse", - record_format="U", - record_length=0, - block_size=32760, - space_primary=2, - space_type="M", - replace=True - ) + if is_created: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) hosts.all.zos_data_set( name=cobol_pds, @@ -2539,7 +2555,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): cobol_pds = "{0}({1})".format(cobol_pds, member) rc = hosts.all.zos_copy( content=COBOL_SRC, - dest=cobol_pds, + dest=cobol_pds ) dest_name = "{0}({1})".format(dest, member) src_name = "{0}({1})".format(src, member) @@ -2561,11 +2577,12 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): ) for result in exec_res.contacted.values(): assert result.get("rc") == 0 - + # Execute the copy from pdse to another 
with executable and validate it copy_res = hosts.all.zos_copy( - src="{0}({1})".format(src, member), - dest="{0}({1})".format(dest, "MEM1"), - remote_src=True) + src="{0}({1})".format(src, member), + dest="{0}({1})".format(dest, "MEM1"), + remote_src=True, + executable=True) verify_copy = hosts.all.shell( cmd="mls {0}".format(dest), @@ -2583,11 +2600,102 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): assert stdout is not None # number of members assert len(stdout.splitlines()) == 2 + # Copy to a uss file executable from the library execute and validate + copy_uss_res = hosts.all.zos_copy( + src="{0}({1})".format(dest, "MEM1"), + dest=uss_dest, + remote_src=True, + executable=True, + force=True) + + for result in copy_uss_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + verify_exe_uss = hosts.all.shell( + cmd="{0}".format(uss_dest) + ) + + for v_cp_u in verify_exe_uss.contacted.values(): + assert v_cp_u.get("rc") == 0 + stdout = v_cp_u.get("stdout") + assert "SIMPLE HELLO WORLD" in str(stdout) finally: hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_data_set(name=src, state="absent") hosts.all.zos_data_set(name=cobol_pds, state="absent") + hosts.all.file(name=uss_dest, state="absent") + + +@pytest.mark.pdse +@pytest.mark.uss +@pytest.mark.parametrize("is_created", ["true", "false"]) +def test_copy_executables_uss_to_member(ansible_zos_module, is_created): + hosts= ansible_zos_module + src= "/tmp/c/hello_world.c" + src_jcl_call= "/tmp/c/call_hw_pgm.jcl" + dest_uss="/tmp/c/hello_world_2" + dest = "USER.LOAD.DEST" + member = "HELLOSRC" + try: + hosts.all.zos_copy(content=hello_world, dest=src, force=True) + hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) + hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") + hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) + verify_exe_src = 
hosts.all.shell(cmd="/tmp/c/hello_world") + for res in verify_exe_src.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) + copy_uss_res = hosts.all.zos_copy( + src="/tmp/c/hello_world", + dest=dest_uss, + remote_src=True, + executable=True, + force=True + ) + verify_exe_dst = hosts.all.shell(cmd="/tmp/c/hello_world_2") + for result in copy_uss_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + for res in verify_exe_dst.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) + if is_created: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + copy_uss_to_mvs_res = hosts.all.zos_copy( + src="/tmp/c/hello_world", + dest="{0}({1})".format(dest, member), + remote_src=True, + executable=True, + force=True + ) + cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" + exec_res = hosts.all.shell( + cmd=cmd.format(member, dest) + ) + for result in copy_uss_to_mvs_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + for res in exec_res.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) + finally: + hosts.all.shell(cmd='rm -r /tmp/c') + hosts.all.zos_data_set(name=dest, state="absent") @pytest.mark.pdse From 21b5008da8e041cfb42ab8efef2aac9621c0c35a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 29 Aug 2023 15:45:48 -0600 Subject: [PATCH 169/413] Updated mounts.evn with latests zoau mounts --- scripts/mounts.env | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/mounts.env b/scripts/mounts.env index 876876cd3..aa325383c 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ 
-39,7 +39,9 @@ zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ "11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ "12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ "13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ -"14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +"14:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ +"15:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V125.ZFS "\ +"16:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE From 3a8c32e36487269e0fd020d6966ebbcced8c212f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 29 Aug 2023 15:52:27 -0600 Subject: [PATCH 170/413] Added changelog --- changelogs/fragments/959-ac-tool-update-mounts.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 changelogs/fragments/959-ac-tool-update-mounts.yml diff --git a/changelogs/fragments/959-ac-tool-update-mounts.yml b/changelogs/fragments/959-ac-tool-update-mounts.yml new file mode 100644 index 000000000..4eb90122d --- /dev/null +++ b/changelogs/fragments/959-ac-tool-update-mounts.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Add ZOAU 1.2.4 and 1.2.5 mounts. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/959) \ No newline at end of file From 8506b623d0039a7dd03787637a15fdf69c96a41d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 30 Aug 2023 13:16:55 -0600 Subject: [PATCH 171/413] Bugfix/815/zos job submit truncates final character of input (#952) * Add first version of the test * Check it * Remove line * Add fragment --- ...os-job-submit-truncate-final-character.yml | 4 ++++ .../modules/test_zos_job_submit_func.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 changelogs/fragments/952-zos-job-submit-truncate-final-character.yml diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml new file mode 100644 index 000000000..b9413e31b --- /dev/null +++ b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml @@ -0,0 +1,4 @@ +bugfix: +- zos_job_submit: The last line of the jcl was missing in the input. + Fix now ensures the presence of the full input in job_submit. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index b7b1ec5f0..b93b448c7 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -254,6 +254,10 @@ // """ +JCL_FULL_INPUT="""//HLQ0 JOB MSGLEVEL=(1,1), +// MSGCLASS=A,CLASS=A,NOTIFY=&SYSUID +//STEP1 EXEC PGM=BPXBATCH,PARM='PGM /bin/sleep 5'""" + TEMP_PATH = "/tmp/jcl" DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -612,6 +616,24 @@ def test_job_submit_jinja_template(ansible_zos_module, args): os.remove(tmp_file.name) +def test_job_submit_full_input(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FULL_INPUT), TEMP_PATH) + ) + results = hosts.all.zos_job_submit( + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + ) + for result in results.contacted.values(): + print(result) + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: From 2d92df5b8366007de1b8dea15bd4db0223d67035 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 4 Sep 2023 14:59:00 -0600 Subject: [PATCH 172/413] =?UTF-8?q?Add=20fix=20for=20change=20copy=20built?= =?UTF-8?q?-in=20for=20zos=5Fcopy=20and=20remove=20remain=20files=E2=80=A6?= =?UTF-8?q?=20(#951)?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add fix for change copy built-in for zos_copy and remove remain files in ansible/temp * Add fragment * Change spaces * Remove deletes * Remove temp files * Change fragment and update if zos_copy fails --- ...or-zos-copy-and-remove-temporary-files.yml | 7 +++ plugins/action/zos_job_submit.py | 48 +++++++++++++------ 2 files changed, 40 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml diff --git a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml new file mode 100644 index 000000000..c90921c9f --- /dev/null +++ b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml @@ -0,0 +1,7 @@ +bugfixes: + - zos_job_submit: Temporary files were created in tmp directory. + Fix now ensures the deletion of files every time the module run. + (https://github.com/ansible-collections/ibm_zos_core/pull/951) +minor_changes: + - zos_job_submit: Change action plugin call from copy to zos_copy. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/951) \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 715ce57ed..db3fb1fd7 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -15,12 +15,18 @@ from ansible.plugins.action import ActionBase from ansible.errors import AnsibleError, AnsibleFileNotFound +from ansible.utils.display import Display # from ansible.module_utils._text import to_bytes, to_text from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.parsing.convert_bool import boolean import os +import copy from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template +from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule + + +display = Display() class ActionModule(ActionBase): @@ -148,26 +154,38 @@ def run(self, tmp=None, task_vars=None): src=tmp_src, dest=dest_path, mode="0600", - _original_basename=source_rel, - ) - ) - result.update( - self._execute_module( - module_name="copy", - module_args=copy_module_args, - task_vars=task_vars, + force=True, + remote_src=True, ) ) - result.update( - self._execute_module( - module_name="ibm.ibm_zos_core.zos_job_submit", - module_args=module_args, - task_vars=task_vars, + copy_task = copy.deepcopy(self._task) + copy_task.args = copy_module_args + zos_copy_action_module = ZosCopyActionModule(task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj) + result.update(zos_copy_action_module.run(task_vars=task_vars)) + if result.get("msg") is None: + module_args["src"] = dest_path + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_job_submit", + module_args=module_args, + task_vars=task_vars, + ) ) - ) - + else: + result.update(dict(failed=True)) if 
rendered_file: os.remove(rendered_file) + if os.path.isfile(tmp_src): + self._connection.exec_command("rm -rf {0}".format(tmp_src)) + if os.path.isfile(dest_file): + self._connection.exec_command("rm -rf {0}".format(dest_file)) + if os.path.isfile(source_full): + self._connection.exec_command("rm -rf {0}".format(source_full)) else: result.update( From f5ec02009f4caa8c90d1d7fb66ee4ddbacbb5afe Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 8 Sep 2023 18:23:06 -0600 Subject: [PATCH 173/413] Corrected changelog tag from bugfix to bugfixes (#963) --- .../fragments/916-zos-lineinfile-does-not-behave-community.yml | 2 +- .../fragments/918-zos-operator-response-come-back-truncate.yaml | 2 +- .../fragments/952-zos-job-submit-truncate-final-character.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml index c1639c769..9b13df055 100644 --- a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml +++ b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) \ No newline at end of file diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml index ef5ae8b36..58900fc01 100644 --- a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml +++ b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_operator: The last line of the operator was missing in the response of the module. 
Fix now ensures the presence of the full output of the operator. (https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml index b9413e31b..aca865791 100644 --- a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml +++ b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_job_submit: The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file From 39b439c204f9adbfcdf4ddc4921f6b41cd1dd9f7 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Tue, 12 Sep 2023 12:04:32 -0700 Subject: [PATCH 174/413] modify get_data_set_attributes function (#964) * modify get_data_set_attributes function to honor incoming param values instead of overwriting in the case of is_binary=True Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...odify-get_data_set_attributes-function.yml | 3 +++ plugins/modules/zos_copy.py | 22 ++++++++++++++----- 2 files changed, 19 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/964-modify-get_data_set_attributes-function.yml diff --git a/changelogs/fragments/964-modify-get_data_set_attributes-function.yml b/changelogs/fragments/964-modify-get_data_set_attributes-function.yml new file mode 100644 index 000000000..da384c77b --- /dev/null +++ b/changelogs/fragments/964-modify-get_data_set_attributes-function.yml @@ -0,0 +1,3 @@ +trivial: +- zos_copy - modify get_data_set_attributes helper function to no longer overwrite caller-defined attributes. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/964) \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index c50fe8c64..aabd5447e 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1613,8 +1613,8 @@ def get_data_set_attributes( name, size, is_binary, - record_format="VB", - record_length=1028, + record_format=None, + record_length=None, type="SEQ", volume=None ): @@ -1649,11 +1649,21 @@ def get_data_set_attributes( space_primary = space_primary + int(math.ceil(space_primary * 0.05)) space_secondary = int(math.ceil(space_primary * 0.10)) - # Overwriting record_format and record_length when the data set has binary data. - if is_binary: - record_format = "FB" - record_length = 80 + # set default value - record_format + if record_format is None: + if is_binary: + record_format = "FB" + else: + record_format = "VB" + + # set default value - record_length + if record_length is None: + if is_binary: + record_length = 80 + else: + record_length = 1028 + # compute block size max_block_size = 32760 if record_format == "FB": # Computing the biggest possible block size that doesn't exceed From 81c1f88ee0dbc7ea119dc4bd6579525c93942c26 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 12 Sep 2023 14:14:51 -0600 Subject: [PATCH 175/413] Add python 3.11-3 to ac mount tables (#966) * Add python 3.11-3 to mount tables Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added changelog fragment Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- changelogs/fragments/966-ac-tool-add-python-311-3.yml | 3 +++ scripts/mounts.env | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/966-ac-tool-add-python-311-3.yml diff --git a/changelogs/fragments/966-ac-tool-add-python-311-3.yml 
b/changelogs/fragments/966-ac-tool-add-python-311-3.yml new file mode 100644 index 000000000..231d3e2be --- /dev/null +++ b/changelogs/fragments/966-ac-tool-add-python-311-3.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Add python 3.11-3 mount table. + (https://github.com/ansible-collections/ibm_zos_core/pull/966) \ No newline at end of file diff --git a/scripts/mounts.env b/scripts/mounts.env index aa325383c..050887102 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -58,7 +58,8 @@ python_mount_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz:/allpyt "3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz:/allpython/3.9:IMSTESTU.PYZ.V39016.ZFS "\ "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz:/allpython/3.10:IMSTESTU.PYZ.V3A09.ZFS "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11:IMSTESTU.PYZ.V3B02.ZFS "\ -"6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS " +"6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS "\ +"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-3:IMSTESTU.PYZ.V3B03.ZFS " # ------------------------------------------------------------------------------ # PYTHON PATH POINTS @@ -75,4 +76,5 @@ python_path_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz "\ "3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz "\ "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz "\ -"6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file +"6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz "\ +"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file From 0ce455c484dccc90ada9582957baa1561743e097 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 13 Sep 2023 16:36:03 -0600 Subject: [PATCH 176/413] Enhance zos_archive and zos_unarchive test cases (#965) * Added test_lines specific to 
the record length * Modified test to add characters length to test Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Enhanced zos_unarchive test cases Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added changelog Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added data integrity check --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/965-enhance-archive-tests.yml | 5 +++ .../modules/test_zos_archive_func.py | 26 ++++++++------ .../modules/test_zos_unarchive_func.py | 36 +++++++++++++++---- 3 files changed, 50 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/965-enhance-archive-tests.yml diff --git a/changelogs/fragments/965-enhance-archive-tests.yml b/changelogs/fragments/965-enhance-archive-tests.yml new file mode 100644 index 000000000..b86bf22bf --- /dev/null +++ b/changelogs/fragments/965-enhance-archive-tests.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_archive: Enhanced test cases to use test lines the same length of the record length. + (https://github.com/ansible-collections/ibm_zos_core/pull/965) + - zos_unarchive: Enhanced test cases to use test lines the same length of the record length. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/965) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 9d92134e5..2705a7137 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -347,7 +347,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["FB", "VB"], ) def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -372,8 +372,12 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -419,7 +423,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["FB", "VB"], ) def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -444,8 +448,12 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in 
data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -487,10 +495,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) -@pytest.mark.parametrize( - "record_length", [80], -) -def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length): +def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module # Clean env @@ -501,7 +506,6 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d name=data_set.get("name"), type=data_set.get("dstype"), state="present", - record_length=record_length, record_format="FB", replace=True, ) diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 831724f21..46a1e8534 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -344,8 +344,12 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -397,6 +401,11 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): assert data_set.get("name") in c_result.get("stdout") + + # Check data integrity after unarchive + cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") + for result in 
cat_result.contacted.values(): + assert result.get("stdout") == test_line finally: hosts.all.zos_data_set(name=data_set.get("name"), state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") @@ -442,8 +451,12 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -930,8 +943,12 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -981,6 +998,13 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): assert data_set.get("name") in c_result.get("stdout") + + # Check data integrity after unarchive + cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") + for result in cat_result.contacted.values(): + assert result.get("stdout") == test_line + + finally: hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") From e2ad0ee14c9a4132563b262431af0602ff19fe30 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 14 Sep 2023 14:56:55 -0400 
Subject: [PATCH 177/413] Initial commit to add LIBRARY to choices and docs --- changelogs/fragments/920-zos-copy-add-library-choice.yml | 4 ++++ plugins/modules/zos_copy.py | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/920-zos-copy-add-library-choice.yml diff --git a/changelogs/fragments/920-zos-copy-add-library-choice.yml b/changelogs/fragments/920-zos-copy-add-library-choice.yml new file mode 100644 index 000000000..cb30191c1 --- /dev/null +++ b/changelogs/fragments/920-zos-copy-add-library-choice.yml @@ -0,0 +1,4 @@ +bugfixes: +- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. + Documentation updated to reflect this change. + (https://github.com/ansible-collections/ibm_zos_core/pull/). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index aabd5447e..b87845fab 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -283,6 +283,7 @@ - PDSE - MEMBER - BASIC + - LIBRARY space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -2741,7 +2742,7 @@ def main(): type=dict( type='str', choices=['BASIC', 'KSDS', 'ESDS', 'RRDS', - 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER'], + 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER', 'LIBRARY'], required=True, ), space_primary=dict( From 7515e8ce4a2354fc6eb3b441a9cc469909db6328 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 14 Sep 2023 15:01:50 -0400 Subject: [PATCH 178/413] added PR value to fragment --- changelogs/fragments/920-zos-copy-add-library-choice.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/920-zos-copy-add-library-choice.yml b/changelogs/fragments/920-zos-copy-add-library-choice.yml index cb30191c1..2d339227b 100644 --- a/changelogs/fragments/920-zos-copy-add-library-choice.yml +++ b/changelogs/fragments/920-zos-copy-add-library-choice.yml @@ -1,4 +1,4 @@ bugfixes: - 
zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. - (https://github.com/ansible-collections/ibm_zos_core/pull/). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/968). \ No newline at end of file From 0be6c693be3bfb06f84e8f54d16f6df2b71a3812 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 19 Sep 2023 10:09:37 -0600 Subject: [PATCH 179/413] Simplify loaldlib test cases (#969) * Simplify loaldlib test cases * Add fragment * Add link for PR * Remove identation --- .../969-Simplify_loadlib_test_cases.yml | 3 + .../functional/modules/test_zos_copy_func.py | 184 ++++++++++++------ 2 files changed, 124 insertions(+), 63 deletions(-) create mode 100644 changelogs/fragments/969-Simplify_loadlib_test_cases.yml diff --git a/changelogs/fragments/969-Simplify_loadlib_test_cases.yml b/changelogs/fragments/969-Simplify_loadlib_test_cases.yml new file mode 100644 index 000000000..ce2060ed8 --- /dev/null +++ b/changelogs/fragments/969-Simplify_loadlib_test_cases.yml @@ -0,0 +1,3 @@ +trivial: +- zos_copy - Divide large test case for loadlibs and simplify functions. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/969) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index dd0114fae..2bcf59a21 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -328,6 +328,32 @@ def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): hosts.all.file(path=temp_jcl, state="absent") return rc +def generate_executable_ds(hosts, src, dest, cobol): + member = "HELLOSRC" + hosts.all.zos_copy(content=COBOL_SRC, dest=cobol) + dest_name = "{0}({1})".format(dest, member) + src_name = "{0}({1})".format(src, member) + rc = link_loadlib_from_cobol(hosts, dest_name, cobol) + assert rc == 0 + cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" + hosts.all.shell(cmd=cmd.format(member, dest)) + rc = link_loadlib_from_cobol(hosts, src_name, cobol) + hosts.all.shell(cmd=cmd.format(member, src)) + assert rc == 0 + exec_res = hosts.all.shell(cmd=cmd.format(member, src)) + for result in exec_res.contacted.values(): + assert result.get("rc") == 0 + +def generate_executable_uss(hosts, src, src_jcl_call): + hosts.all.zos_copy(content=hello_world, dest=src, force=True) + hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) + hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") + hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) + verify_exe_src = hosts.all.shell(cmd="/tmp/c/hello_world") + for res in verify_exe_src.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) @pytest.mark.uss @pytest.mark.parametrize("src", [ @@ -2515,7 +2541,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr src = "USER.LOAD.SRC" dest = "USER.LOAD.DEST" cobol_pds = "USER.COBOL.SRC" - uss_dest = "/tmp/HELLO" + dest_exe = "USER.LOAD.EXE" try: hosts.all.zos_data_set( 
name=src, @@ -2528,19 +2554,17 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr space_type="M", replace=True ) - if is_created: - hosts.all.zos_data_set( - name=dest, - state="present", - type="pdse", - record_format="U", - record_length=0, - block_size=32760, - space_primary=2, - space_type="M", - replace=True - ) - + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) hosts.all.zos_data_set( name=cobol_pds, state="present", @@ -2553,61 +2577,94 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr ) member = "HELLOSRC" cobol_pds = "{0}({1})".format(cobol_pds, member) - rc = hosts.all.zos_copy( - content=COBOL_SRC, - dest=cobol_pds - ) - dest_name = "{0}({1})".format(dest, member) - src_name = "{0}({1})".format(src, member) - # both src and dest need to be a loadlib - rc = link_loadlib_from_cobol(hosts, dest_name, cobol_pds) - assert rc == 0 - # make sure is executable - cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" - exec_res = hosts.all.shell( - cmd=cmd.format(member, dest) - ) - for result in exec_res.contacted.values(): - assert result.get("rc") == 0 - rc = link_loadlib_from_cobol(hosts, src_name, cobol_pds) - assert rc == 0 - - exec_res = hosts.all.shell( - cmd=cmd.format(member, src) - ) - for result in exec_res.contacted.values(): - assert result.get("rc") == 0 - # Execute the copy from pdse to another with executable and validate it + generate_executable_ds(hosts, src, dest, cobol_pds) + if is_created: + hosts.all.zos_data_set( + name=dest_exe, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) copy_res = hosts.all.zos_copy( src="{0}({1})".format(src, member), - dest="{0}({1})".format(dest, "MEM1"), + dest="{0}({1})".format(dest_exe, 
"MEM1"), remote_src=True, executable=True) verify_copy = hosts.all.shell( - cmd="mls {0}".format(dest), + cmd="mls {0}".format(dest_exe), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - assert result.get("dest") == "{0}({1})".format(dest, "MEM1") + assert result.get("dest") == "{0}({1})".format(dest_exe, "MEM1") for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 stdout = v_cp.get("stdout") assert stdout is not None - # number of members - assert len(stdout.splitlines()) == 2 - # Copy to a uss file executable from the library execute and validate + finally: + hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=cobol_pds, state="absent") + +@pytest.mark.pdse +@pytest.mark.uss +def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): + hosts = ansible_zos_module + src = "USER.LOAD.SRC" + dest = "USER.LOAD.DEST" + cobol_pds = "USER.COBOL.SRC" + uss_dest = "/tmp/HELLO" + try: + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + hosts.all.zos_data_set( + name=cobol_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + member = "HELLOSRC" + cobol_pds = "{0}({1})".format(cobol_pds, member) + generate_executable_ds(hosts, src, dest, cobol_pds) copy_uss_res = hosts.all.zos_copy( - src="{0}({1})".format(dest, "MEM1"), + src="{0}({1})".format(src, member), dest=uss_dest, remote_src=True, executable=True, force=True) - for result in copy_uss_res.contacted.values(): assert 
result.get("msg") is None assert result.get("changed") is True @@ -2615,12 +2672,10 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr verify_exe_uss = hosts.all.shell( cmd="{0}".format(uss_dest) ) - for v_cp_u in verify_exe_uss.contacted.values(): assert v_cp_u.get("rc") == 0 stdout = v_cp_u.get("stdout") assert "SIMPLE HELLO WORLD" in str(stdout) - finally: hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_data_set(name=src, state="absent") @@ -2628,26 +2683,14 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.file(name=uss_dest, state="absent") -@pytest.mark.pdse @pytest.mark.uss -@pytest.mark.parametrize("is_created", ["true", "false"]) -def test_copy_executables_uss_to_member(ansible_zos_module, is_created): +def test_copy_executables_uss_to_uss(ansible_zos_module): hosts= ansible_zos_module src= "/tmp/c/hello_world.c" src_jcl_call= "/tmp/c/call_hw_pgm.jcl" dest_uss="/tmp/c/hello_world_2" - dest = "USER.LOAD.DEST" - member = "HELLOSRC" try: - hosts.all.zos_copy(content=hello_world, dest=src, force=True) - hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) - hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") - hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) - verify_exe_src = hosts.all.shell(cmd="/tmp/c/hello_world") - for res in verify_exe_src.contacted.values(): - assert res.get("rc") == 0 - stdout = res.get("stdout") - assert "Hello World" in str(stdout) + generate_executable_uss(hosts, src, src_jcl_call) copy_uss_res = hosts.all.zos_copy( src="/tmp/c/hello_world", dest=dest_uss, @@ -2663,6 +2706,21 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): assert res.get("rc") == 0 stdout = res.get("stdout") assert "Hello World" in str(stdout) + finally: + hosts.all.shell(cmd='rm -r /tmp/c') + + +@pytest.mark.pdse +@pytest.mark.uss +@pytest.mark.parametrize("is_created", ["true", "false"]) +def 
test_copy_executables_uss_to_member(ansible_zos_module, is_created): + hosts= ansible_zos_module + src= "/tmp/c/hello_world.c" + src_jcl_call= "/tmp/c/call_hw_pgm.jcl" + dest = "USER.LOAD.DEST" + member = "HELLOSRC" + try: + generate_executable_uss(hosts, src, src_jcl_call) if is_created: hosts.all.zos_data_set( name=dest, From dfe1ba8abc883cfb259825d8167d07077c4cb67d Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 19 Sep 2023 13:13:23 -0400 Subject: [PATCH 180/413] Change implemented in zos_operator Working on zos_operator_action_query Added initial changelog fragment --- ...nhance-Add-wait-zos-operator-and-query.yml | 5 +++++ plugins/modules/zos_operator.py | 22 +++++++++---------- plugins/modules/zos_operator_action_query.py | 2 ++ 3 files changed, 18 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml new file mode 100644 index 000000000..4067471dc --- /dev/null +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -0,0 +1,5 @@ +enhancements: + - zos_operator: Added the 'wait' parameter back in to use the new -w operator. + (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) + - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. + (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 5bd04ba50..29fc25817 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -57,14 +57,11 @@ default: 1 wait: description: - - Configuring wait used by the L(zos_operator,./zos_operator.html) module - has been deprecated and will be removed in a future ibm.ibm_zos_core - collection. 
- - Setting this option will yield no change, it is deprecated. - - Review option I(wait_time_s) to instruct operator commands to wait. + - Setting this option will tell opercmd to wait the full wait_time, instead + of returning on first data received type: bool required: false - default: true + default: false """ EXAMPLES = r""" @@ -81,12 +78,13 @@ zos_operator: cmd: "\\$PJ(*)" -- name: Execute operator command to show jobs, waiting up to 5 seconds for response +- name: Execute operator command to show jobs, always waiting 8 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 5 + wait: true -- name: Execute operator command to show jobs, always waiting 7 seconds for response +- name: Execute operator command to show jobs, waiting up to 7 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 7 @@ -195,7 +193,7 @@ def run_module(): cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), wait_time_s=dict(type="int", required=False, default=1), - wait=dict(type="bool", required=False, default=True), + wait=dict(type="bool", required=False, default=False), ) result = dict(changed=False) @@ -266,8 +264,7 @@ def parse_params(params): cmd=dict(arg_type="str", required=True), verbose=dict(arg_type="bool", required=False), wait_time_s=dict(arg_type="int", required=False), - wait=dict(arg_type="bool", required=False, removed_at_date='2022-11-30', - removed_from_collection='ibm.ibm_zos_core'), + wait=dict(arg_type="bool", required=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -286,6 +283,9 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") + if params.get("wait"): + kwargs.update({"wait_arg": True}) + args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 10d096b48..0211f8a4b 
100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -398,6 +398,8 @@ def handle_conditions(list, condition_type, value): def execute_command(operator_cmd): response = opercmd.execute(operator_cmd) +# response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) + rc = response.rc stdout = response.stdout_response stderr = response.stderr_response From 4961784de3b409eaa319d2de42db91c97a096354 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 11:43:15 -0400 Subject: [PATCH 181/413] cleaned up note in zos_operator Added wait time and wait values to zoaq --- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 62 ++++++++++++++++++-- 2 files changed, 58 insertions(+), 6 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 29fc25817..2dfa12fdb 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -57,7 +57,7 @@ default: 1 wait: description: - - Setting this option will tell opercmd to wait the full wait_time, instead + - Setting this option will tell the system to wait the full wait_time, instead of returning on first data received type: bool required: false diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 0211f8a4b..ddef406eb 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -57,6 +57,26 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false + wait_time_s: + description: + - Set maximum time in seconds to wait for the commands to execute. + - When set to 0, the system default is used. + - This option is helpful on a busy system requiring more time to execute + commands. + - Setting I(wait) can instruct if execution should wait the + full I(wait_time_s). 
+ - Because 2 functions are called, potential time delay is doubled. + type: int + required: false + default: 1 + wait: + description: + - Setting this option will tell the system to wait the full wait_time, instead + of returning on first data received + - Because 2 functions are called, potential time delay is doubled. + type: bool + required: false + default: false message_filter: description: - Return outstanding messages requiring operator action awaiting a @@ -101,6 +121,19 @@ zos_operator_action_query: job_name: im5* +- name: Display all outstanding messages whose job name begin with im7, + wait up to 10 seconds per call (20 seconds overall) for data + zos_operator_action_query: + job_name: im7* + wait_time_s: 10 + +- name: Display all outstanding messages whose job name begin with im9, + wait up a full 15 seconds per call (30 seconds overall) for data + zos_operator_action_query: + job_name: im9* + wait_time_s: 15 + wait: True + - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: message_id: dsi* @@ -235,6 +268,8 @@ def run_module(): system=dict(type="str", required=False), message_id=dict(type="str", required=False), job_name=dict(type="str", required=False), + wait_time_s=dict(type="int", required=False, default=1), + wait=dict(type="bool", required=False, default=False), message_filter=dict( type="dict", required=False, @@ -251,7 +286,19 @@ def run_module(): try: new_params = parse_params(module.params) - cmd_result_a = execute_command("d r,a,s") + kwargs = {} + + wait_s = params.get("wait_time_s") + + if new_params.get("wait"): + kwargs.update({"wait_arg": True}) + + args = [] + + cmdtxt = "d r,a,s" + + cmd_result_a = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + if cmd_result_a.rc > 0: module.fail_json( msg="A non-zero return code was received while querying the operator.", @@ -263,7 +310,10 @@ def run_module(): cmd="d r,a,s", ) - cmd_result_b = execute_command("d r,a,jn") + cmdtxt = 
new_params.get("d r,a,jn") + + cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + if cmd_result_b.rc > 0: module.fail_json( msg="A non-zero return code was received while querying the operator.", @@ -295,6 +345,8 @@ def parse_params(params): system=dict(arg_type=system_type, required=False), message_id=dict(arg_type=message_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), + wait_time_s=dict(arg_type="int", required=False), + wait=dict(arg_type="bool", required=False), message_filter=dict(arg_type=message_filter_type, required=False) ) parser = BetterArgParser(arg_defs) @@ -395,10 +447,10 @@ def handle_conditions(list, condition_type, value): return newlist -def execute_command(operator_cmd): +def execute_command(operator_cmd, timeout=1, *args, **kwargs): - response = opercmd.execute(operator_cmd) -# response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) + # response = opercmd.execute(operator_cmd) + response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) rc = response.rc stdout = response.stdout_response From 9136715656a6511581278105c30e3dc4850d8ad7 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 11:59:55 -0400 Subject: [PATCH 182/413] corrected pep8/pylint errors --- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 2dfa12fdb..c34d64818 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -284,7 +284,7 @@ def run_operator_command(params): cmdtxt = params.get("cmd") if params.get("wait"): - kwargs.update({"wait_arg": True}) + kwargs.update({"wait_arg": True}) args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py 
b/plugins/modules/zos_operator_action_query.py index ddef406eb..9abd8e493 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -288,7 +288,7 @@ def run_module(): kwargs = {} - wait_s = params.get("wait_time_s") + wait_s = new_params.get("wait_time_s") if new_params.get("wait"): kwargs.update({"wait_arg": True}) From ac8acd5d38095d6bbb435a87916b1766a6edd95e Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 12:48:51 -0400 Subject: [PATCH 183/413] added output to failing test --- .../modules/test_zos_operator_action_query_func.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 4872a2a02..ce60e9588 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -23,14 +23,19 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query() try: + print( "\n\n=========== in no-options loop 1") for action in results.get("actions"): + print( action.get("message_text", "-no-")) if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass + + print( "\n\n=============== in no-options loop 2") for result in results.contacted.values(): - assert result.get("actions") + print( result ) + # assert result.get("actions") def test_zos_operator_action_query_option_message_id(ansible_zos_module): hosts = ansible_zos_module From bb190304e6928dd1d23cf1f0fb684a3ca3c8ce34 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 13:00:40 -0400 Subject: [PATCH 184/413] added another test print, since the test error moved 
--- tests/functional/modules/test_zos_operator_func.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 84f593f51..6843a5678 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -41,6 +41,8 @@ def test_zos_operator_various_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd=command) for result in results.contacted.values(): + print( "\n\n===result:" ) + print( result ) assert result["rc"] == expected_rc assert result.get("changed") is changed From e7f02a2974274cae243e0fb5e4ec8a165e949817 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 13:22:38 -0400 Subject: [PATCH 185/413] added print and un-commented upper assertion --- .../modules/test_zos_operator_action_query_func.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index ce60e9588..76dbc5d83 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -35,7 +35,7 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): print( "\n\n=============== in no-options loop 2") for result in results.contacted.values(): print( result ) - # assert result.get("actions") + assert result.get("actions") def test_zos_operator_action_query_option_message_id(ansible_zos_module): hosts = ansible_zos_module @@ -48,7 +48,10 @@ def test_zos_operator_action_query_option_message_id(ansible_zos_module): cmd="{0}cancel".format(action.get("number"))) except Exception: pass + + print( "\n\n=============== in msgid loop 2") for result in results.contacted.values(): + print( result ) assert result.get("actions") def 
test_zos_operator_action_query_option_message_id_invalid_abbreviation( From 15371f669dd1d76a5011b64720d3b328b30a58c8 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 16:14:19 -0400 Subject: [PATCH 186/413] removed embedded print statements, changed \$ to '$ to eliminate deprecation warning --- .../modules/test_zos_operator_action_query_func.py | 9 +-------- tests/functional/modules/test_zos_operator_func.py | 5 +---- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 76dbc5d83..30f5175e4 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -16,25 +16,21 @@ __metaclass__ = type import pytest -import unittest + def test_zos_operator_action_query_no_options(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query() try: - print( "\n\n=========== in no-options loop 1") for action in results.get("actions"): - print( action.get("message_text", "-no-")) if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass - print( "\n\n=============== in no-options loop 2") for result in results.contacted.values(): - print( result ) assert result.get("actions") def test_zos_operator_action_query_option_message_id(ansible_zos_module): @@ -49,9 +45,7 @@ def test_zos_operator_action_query_option_message_id(ansible_zos_module): except Exception: pass - print( "\n\n=============== in msgid loop 2") for result in results.contacted.values(): - print( result ) assert result.get("actions") def test_zos_operator_action_query_option_message_id_invalid_abbreviation( @@ -275,7 +269,6 @@ def 
test_zos_operator_action_query_option_message_filter_multiple_matches( except Exception: pass for result in results.contacted.values(): - print(result.get("actions")) assert result.get("actions") assert len(result.get("actions")) > 1 diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 6843a5678..5ce87370d 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -41,8 +41,6 @@ def test_zos_operator_various_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd=command) for result in results.contacted.values(): - print( "\n\n===result:" ) - print( result ) assert result["rc"] == expected_rc assert result.get("changed") is changed @@ -114,7 +112,6 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): # assert timediff < 15 for result in results.contacted.values(): - pprint(result) assert result["rc"] == 0 assert result.get("changed") is True assert result.get("content") is not None @@ -124,7 +121,7 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_operator(cmd="\$dspl") + results = hosts.all.zos_operator(cmd='$dspl') # \$ triggers warning res = dict() res["stdout"] = [] for result in results.contacted.values(): From 93573e1a02765cddda81148948aaf1e74bdf4414 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 16:28:16 -0400 Subject: [PATCH 187/413] switch test back to "\$ --- tests/functional/modules/test_zos_operator_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 5ce87370d..4ad07d882 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ 
b/tests/functional/modules/test_zos_operator_func.py @@ -121,7 +121,7 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_operator(cmd='$dspl') # \$ triggers warning + results = hosts.all.zos_operator(cmd="\$dspl") res = dict() res["stdout"] = [] for result in results.contacted.values(): From 959e0c3c4cdde6e5b05829e5f1c0be151fbcf947 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 17:43:53 -0400 Subject: [PATCH 188/413] correction to zos_operator_action_query to pass the second query cmd correctly --- plugins/modules/zos_operator_action_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 9abd8e493..4270e33c8 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -310,7 +310,7 @@ def run_module(): cmd="d r,a,s", ) - cmdtxt = new_params.get("d r,a,jn") + cmdtxt = "d r,a,jn" cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) From a97234d40a2a6987ce7e5e49e59a24e0c414caa3 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 21 Sep 2023 09:48:21 -0400 Subject: [PATCH 189/413] Added PR# to changelog tweaked description of new feature in both affected functions. 
--- .../943-enhance-Add-wait-zos-operator-and-query.yml | 5 +++-- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 4067471dc..59547c8d4 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,5 +1,6 @@ enhancements: - zos_operator: Added the 'wait' parameter back in to use the new -w operator. - (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) + (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. - (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/976) + diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index c34d64818..9df17799f 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -78,7 +78,7 @@ zos_operator: cmd: "\\$PJ(*)" -- name: Execute operator command to show jobs, always waiting 8 seconds for response +- name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 5 diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 4270e33c8..026a8343a 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -128,7 +128,7 @@ wait_time_s: 10 - name: Display all outstanding messages whose job name begin with im9, - wait up a full 15 seconds per call (30 seconds overall) for data + wait a full 15 seconds per call (30 seconds overall) for data zos_operator_action_query: 
job_name: im9* wait_time_s: 15 From e6321bfb91e0ac815072358c23cf56ccc4fc7b97 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 21 Sep 2023 10:08:03 -0400 Subject: [PATCH 190/413] added changelog for ticket --- .../fragments/920-bug-add-library-feature-documentation.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelogs/fragments/920-bug-add-library-feature-documentation.yml diff --git a/changelogs/fragments/920-bug-add-library-feature-documentation.yml b/changelogs/fragments/920-bug-add-library-feature-documentation.yml new file mode 100644 index 000000000..efafd82bc --- /dev/null +++ b/changelogs/fragments/920-bug-add-library-feature-documentation.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_copy: Add 'LIBRARY' option as a destination dataset suboption. + Update documentation to show this option. + (https://github.com/ansible-collections/ibm_zos_core/pull/968) From 77dae086ecc472bf5948c79931e4c8c282b77f4b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 22 Sep 2023 09:04:37 -0600 Subject: [PATCH 191/413] Modified versions in bug issue template to avoid users picking a non-existing version Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 8a1cd3ccd..d50883065 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -45,8 +45,6 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: false options: - - v3.14.x - - v3.13.x - v3.12.x - v3.11.x - v3.10.x @@ -61,9 +59,6 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). 
multiple: false options: - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 - v1.8.0-beta.1 - v1.7.0 - v1.7.0-beta.1 From ab780d2decc46a43d077046e155aa587319b69f8 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 22 Sep 2023 12:25:32 -0400 Subject: [PATCH 192/413] Added zoau_api_version logic to check for 1.2.5 or later as a condition for wait_arg Added mention of this to documentation of interface --- plugins/modules/zos_operator.py | 24 ++++++++++++++++++-- plugins/modules/zos_operator_action_query.py | 24 ++++++++++++++++++-- 2 files changed, 44 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 9df17799f..76b894425 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -59,6 +59,7 @@ description: - Setting this option will tell the system to wait the full wait_time, instead of returning on first data received + - This option is only available with zoau 1.2.5 or later type: bool required: false default: false @@ -176,6 +177,11 @@ except Exception: opercmd = MissingZOAUImport() +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" + def execute_command(operator_cmd, timeout=1, *args, **kwargs): start = timer() @@ -283,8 +289,22 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") - if params.get("wait"): - kwargs.update({"wait_arg": True}) + zv = ZOAU_API_VERSION.split(".") + getit = False + if( zv[0] > "1"): + getit = True + elif( zv[0] == "1" and zv[1] > "2"): + getit = True + elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + getit = True + + if getit: + if params.get("wait"): + kwargs.update({"wait_arg": True}) + else: + kwargs.pop("wait_arg", "0") + else: + kwargs.pop("wait_arg", "0") args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py 
b/plugins/modules/zos_operator_action_query.py index 026a8343a..0b340f936 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -74,6 +74,7 @@ - Setting this option will tell the system to wait the full wait_time, instead of returning on first data received - Because 2 functions are called, potential time delay is doubled. + - This option is only available with zoau 1.2.5 or later type: bool required: false default: false @@ -262,6 +263,11 @@ except Exception: opercmd = MissingZOAUImport() +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" + def run_module(): module_args = dict( @@ -290,8 +296,22 @@ def run_module(): wait_s = new_params.get("wait_time_s") - if new_params.get("wait"): - kwargs.update({"wait_arg": True}) + zv = ZOAU_API_VERSION.split(".") + getit = False + if( zv[0] > "1"): + getit = True + elif( zv[0] == "1" and zv[1] > "2"): + getit = True + elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + getit = True + + if getit: + if new_params.get("wait"): + kwargs.update({"wait_arg": True}) + else: + kwargs.pop("wait_arg", "0") + else: + kwargs.pop("wait_arg", "0") args = [] From a8f74262389e181ddb961e7d6f31df7407467085 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 22 Sep 2023 12:55:37 -0400 Subject: [PATCH 193/413] corrected pep8 errors --- plugins/modules/zos_operator.py | 14 +++++++------- plugins/modules/zos_operator_action_query.py | 6 +++--- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 76b894425..35f155e65 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -291,18 +291,18 @@ def run_operator_command(params): zv = ZOAU_API_VERSION.split(".") getit = False - if( zv[0] > "1"): + if zv[0] > "1": getit = True - elif( zv[0] == "1" and zv[1] > "2"): + elif zv[0] == "1" and zv[1] > "2": getit = True - 
elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": getit = True if getit: - if params.get("wait"): - kwargs.update({"wait_arg": True}) - else: - kwargs.pop("wait_arg", "0") + if params.get("wait"): + kwargs.update({"wait_arg": True}) + else: + kwargs.pop("wait_arg", "0") else: kwargs.pop("wait_arg", "0") diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 0b340f936..77d130697 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -298,11 +298,11 @@ def run_module(): zv = ZOAU_API_VERSION.split(".") getit = False - if( zv[0] > "1"): + if zv[0] > "1": getit = True - elif( zv[0] == "1" and zv[1] > "2"): + elif zv[0] == "1" and zv[1] > "2": getit = True - elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": getit = True if getit: From 4d033385b0d1be275623c30ae45fcf0c9ad13628 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 4 Oct 2023 10:11:18 -0400 Subject: [PATCH 194/413] removed redundant changelog fragment --- .../fragments/920-bug-add-library-feature-documentation.yml | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 changelogs/fragments/920-bug-add-library-feature-documentation.yml diff --git a/changelogs/fragments/920-bug-add-library-feature-documentation.yml b/changelogs/fragments/920-bug-add-library-feature-documentation.yml deleted file mode 100644 index efafd82bc..000000000 --- a/changelogs/fragments/920-bug-add-library-feature-documentation.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_copy: Add 'LIBRARY' option as a destination dataset suboption. - Update documentation to show this option. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/968) From 0e2fb96338e0457f9095619f41b6a7f9fbda63ab Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 4 Oct 2023 11:21:00 -0400 Subject: [PATCH 195/413] Changed Enhancements to minor_changes --- .../fragments/943-enhance-Add-wait-zos-operator-and-query.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 59547c8d4..71e24fc14 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,4 +1,4 @@ -enhancements: +minor_changes: - zos_operator: Added the 'wait' parameter back in to use the new -w operator. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. From 36e6368a96c963309f91e44db9a4330db7fc5250 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 5 Oct 2023 22:46:11 -0700 Subject: [PATCH 196/413] push updated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_copy.rst | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 45dee10a7..71cd094fc 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -74,7 +74,7 @@ dest If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. - If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. 
If ``src`` is binary, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. When ``dest`` is a data set, precedence rules apply. If ``dest_data_set`` is set, this will take precedence over an existing data set. If ``dest`` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, ``dest`` will be created with the same attributes of ``src``. @@ -156,6 +156,21 @@ is_binary | **type**: bool +executable + If set to ``true``, indicates that the file or library to be copied is an executable. + + If the ``src`` executable has an alias, the alias information is also copied. If the ``dest`` is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. + + If *executable=true*, and ``dest`` is a data set, it must be a PDS or PDSE (library). + + If ``dest`` is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. + + If ``dest`` is a file, execute permission for the user will be added to the file (``u+x``). 
+ + | **required**: False + | **type**: bool + + local_follow This flag indicates that any existing filesystem links in the source tree should be followed. @@ -247,7 +262,7 @@ dest_data_set | **required**: True | **type**: str - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC + | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC, LIBRARY space_primary @@ -672,6 +687,13 @@ Examples record_format: VB record_length: 150 + - name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. + zos_copy: + src: HLQ.COBOLSRC.PDSE(TESTPGM) + dest: HLQ.NEW.PDSE(MYCOBOL) + remote_src: true + executable: true + @@ -691,6 +713,8 @@ Notes `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option executable that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will be responded with a (FSUM8976,./zos_copy.html) error. 
+ See Also From 94985a39ff1702dc763028f2caca6a237f8fc581 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 6 Oct 2023 10:04:42 -0600 Subject: [PATCH 197/413] Enabler/validate path join (#962) * Added real path fetch to base * Sec changes * Updated changelog and template * Added validation to zos_copy and zos_fetch Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Corrected positional argument * Added validation changes Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed f-string * Fixed path join for copy_to_file --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/963-validate-path-join.yml | 5 +++ plugins/action/zos_copy.py | 4 +- plugins/action/zos_fetch.py | 8 ++-- plugins/module_utils/encode.py | 12 ++--- plugins/module_utils/template.py | 4 +- plugins/module_utils/validation.py | 44 +++++++++++++++++++ plugins/modules/zos_copy.py | 30 ++++++++----- plugins/modules/zos_fetch.py | 3 +- 8 files changed, 84 insertions(+), 26 deletions(-) create mode 100644 changelogs/fragments/963-validate-path-join.yml create mode 100644 plugins/module_utils/validation.py diff --git a/changelogs/fragments/963-validate-path-join.yml b/changelogs/fragments/963-validate-path-join.yml new file mode 100644 index 000000000..017c793cc --- /dev/null +++ b/changelogs/fragments/963-validate-path-join.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_fetch: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_copy: Add validation into path joins to detect unauthorized path traversals. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/962) \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index c6273132c..afc454359 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -33,7 +33,7 @@ is_data_set ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template @@ -186,7 +186,7 @@ def run(self, tmp=None, task_vars=None): src = rendered_dir task_args["size"] = sum( - os.stat(os.path.join(path, f)).st_size + os.stat(os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(f))).st_size for path, dirs, files in os.walk(src) for f in files ) diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index e10dbd75f..087c70953 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -26,7 +26,7 @@ from ansible.utils.display import Display from ansible import cli -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation SUPPORTED_DS_TYPES = frozenset({"PS", "PO", "VSAM", "USS"}) @@ -182,10 +182,12 @@ def run(self, tmp=None, task_vars=None): if dest.endswith(os.sep): if fetch_member: base = os.path.dirname(dest) - dest = os.path.join(base, member_name) + dest = os.path.join(validation.validate_safe_path(base), validation.validate_safe_path(member_name)) + display.vvv(u"This is how dest looks {0}".format(dest), host=self._play_context.remote_addr) else: base = os.path.basename(source_local) - dest = os.path.join(dest, base) + dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(base)) + display.vvv(u"This is how dest looks {0}".format(dest), host=self._play_context.remote_addr) if not 
dest.startswith("/"): dest = self._loader.path_dwim(dest) else: diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index a96bf46d5..047aa654c 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -31,7 +31,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import copy, system +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import copy, system, validation from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) @@ -327,7 +327,7 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): if path.isdir(src): for (dir, subdir, files) in walk(src): for file in files: - file_list.append(path.join(dir, file)) + file_list.append(path.join(validation.validate_safe_path(dir), validation.validate_safe_path(file))) if len(file_list) == 0: raise EncodeError( "Directory {0} is empty. 
Please check the path.".format(src) @@ -335,8 +335,8 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): elif len(file_list) == 1: if path.isdir(dest): file_name = path.basename(file_list[0]) - src_f = path.join(src, file_name) - dest_f = path.join(dest, file_name) + src_f = path.join(validation.validate_safe_path(src), validation.validate_safe_path(file_name)) + dest_f = path.join(validation.validate_safe_path(dest), validation.validate_safe_path(file_name)) convert_rc = self.uss_convert_encoding( src_f, dest_f, from_code, to_code ) @@ -361,7 +361,7 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): else: if path.isdir(dest): file_name = path.basename(path.abspath(src)) - dest = path.join(dest, file_name) + dest = path.join(validation.validate_safe_path(dest), validation.validate_safe_path(file_name)) convert_rc = self.uss_convert_encoding(src, dest, from_code, to_code) return convert_rc @@ -433,7 +433,7 @@ def mvs_convert_encoding( elif dest_type == "PO": for (dir, subdir, files) in walk(temp_dest): for file in files: - temp_file = path.join(dir, file) + temp_file = path.join(validation.validate_safe_path(dir), validation.validate_safe_path(file)) rc, out, err = copy.copy_uss2mvs(temp_file, dest, "PO") convert_rc = True else: diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 308946da2..407a231c6 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -32,7 +32,7 @@ except Exception: jinja2 = MissingImport("jinja2") -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation def _process_boolean(arg, default=False): @@ -283,7 +283,7 @@ def render_dir_template(self, variables): try: temp_parent_dir = tempfile.mkdtemp() last_dir = os.path.basename(self.template_dir) - temp_template_dir = os.path.join(temp_parent_dir, last_dir) + temp_template_dir = 
os.path.join(validation.validate_safe_path(temp_parent_dir), validation.validate_safe_path(last_dir)) os.makedirs(temp_template_dir, exist_ok=True) except FileExistsError as err: raise FileExistsError("Unable to create directory for rendered templates: {0}".format( diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py new file mode 100644 index 000000000..c08847503 --- /dev/null +++ b/plugins/module_utils/validation.py @@ -0,0 +1,44 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +""" +Class implemented for common validations that are not specific to z/OS but rather system or +security related. + +""" +import os + + +def validate_safe_path(path): + """ + This function is implemented to validate against path traversal attack + when using os.path.join function. + + In this action plugin, path is on the controller. + """ + if not os.path.isabs(path): + real_path = os.path.realpath(path) + if not os.path.exists(real_path) and not real_path.endswith(os.sep): + # if path doesn't exist and does not contain separator then is likely a member. 
+ return path + if not os.access(path=real_path, mode=os.F_OK): + raise DirectoryTraversalError(real_path) + return path + + +class DirectoryTraversalError(Exception): + def __init__(self, path): + self.msg = "Detected directory traversal, user does not have access to {0}".format(path) + super().__init__(self.msg) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index aabd5447e..625e2e6b2 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -743,7 +743,7 @@ idcams ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, data_set, encode, backup, copy + better_arg_parser, data_set, encode, backup, copy, validation, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, @@ -958,7 +958,7 @@ def _convert_encoding_dir(self, dir_path, from_code_set, to_code_set): enc_utils = encode.EncodeUtils() for path, dirs, files in os.walk(dir_path): for file_path in files: - full_file_path = os.path.join(path, file_path) + full_file_path = os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(file_path)) rc = enc_utils.uss_convert_encoding( full_file_path, full_file_path, from_code_set, to_code_set ) @@ -1159,7 +1159,9 @@ def copy_to_uss( self.module.set_mode_if_different(dest, mode, False) if changed_files: for filepath in changed_files: - self.module.set_mode_if_different(os.path.join(dest, filepath), mode, False) + self.module.set_mode_if_different( + os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(filepath)), mode, False + ) if group is not None: self.module.set_group_if_different(dest, group, False) if owner is not None: @@ -1182,9 +1184,9 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): Returns: {str} -- Destination where the file was copied to """ + src_path = os.path.basename(src) if src else "inline_copy" if os.path.isdir(dest): - dest = os.path.join(dest, 
os.path.basename(src) - if src else "inline_copy") + dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) new_src = temp_path or conv_path or src try: @@ -1250,13 +1252,13 @@ def _copy_to_dir( try: if copy_directory: - dest = os.path.join(dest_dir, os.path.basename(os.path.normpath(src_dir))) + dest = os.path.join(validation.validate_safe_path(dest_dir), validation.validate_safe_path(os.path.basename(os.path.normpath(src_dir)))) dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) # Restoring permissions for preexisting files and subdirectories. for filepath, permissions in original_permissions: mode = "0{0:o}".format(stat.S_IMODE(permissions)) - self.module.set_mode_if_different(os.path.join(dest, filepath), mode, False) + self.module.set_mode_if_different(os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(filepath)), mode, False) except Exception as err: raise CopyOperationError( msg="Error while copying data to destination directory {0}".format(dest_dir), @@ -1291,7 +1293,9 @@ def _get_changed_files(self, src, dest, copy_directory): files_to_change = [] existing_files = [] for relative_path in files_to_copy: - if os.path.exists(os.path.join(dest, parent_dir, relative_path)): + if os.path.exists( + os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(parent_dir), validation.validate_safe_path(relative_path)) + ): existing_files.append(relative_path) else: files_to_change.append(relative_path) @@ -1301,7 +1305,9 @@ def _get_changed_files(self, src, dest, copy_directory): files_to_change.extend(existing_files) # Creating tuples with (filename, permissions). 
original_permissions = [ - (filepath, os.stat(os.path.join(dest, parent_dir, filepath)).st_mode) + (filepath, os.stat( + os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(parent_dir), validation.validate_safe_path(filepath)) + ).st_mode) for filepath in existing_files ] @@ -1323,11 +1329,11 @@ def _walk_uss_tree(self, dir): for dirpath, subdirs, files in os.walk(".", True): paths += [ - os.path.join(dirpath, subdir).replace("./", "") + os.path.join(validation.validate_safe_path(dirpath), validation.validate_safe_path(subdir)).replace("./", "") for subdir in subdirs ] paths += [ - os.path.join(dirpath, filepath).replace("./", "") + os.path.join(validation.validate_safe_path(dirpath), validation.validate_safe_path(filepath)).replace("./", "") for filepath in files ] @@ -2663,7 +2669,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED: if not remote_src and not copy_member and os.path.isdir(temp_path): - temp_path = os.path.join(temp_path, os.path.basename(src)) + temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( module, is_binary=is_binary, executable=executable, backup_name=backup_name diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index ca6359c55..d8b15c0d9 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -282,6 +282,7 @@ better_arg_parser, data_set, encode, + validation, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( MissingZOAUImport, @@ -511,7 +512,7 @@ def _fetch_pdse(self, src, is_binary, encoding=None): root, dirs, files = next(os.walk(dir_path)) try: for file in files: - file_path = os.path.join(root, file) + file_path = os.path.join(validation.validate_safe_path(root), validation.validate_safe_path(file)) 
enc_utils.uss_convert_encoding( file_path, file_path, from_code_set, to_code_set ) From dd10d0d9065fb27eefcefda11eccba1b1b8cfa97 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 6 Oct 2023 15:15:06 -0400 Subject: [PATCH 198/413] Removed 'wait' as an option, and pass wait_arg=true to zoau --- ...nhance-Add-wait-zos-operator-and-query.yml | 4 ++-- plugins/modules/zos_operator.py | 23 +------------------ plugins/modules/zos_operator_action_query.py | 23 ++----------------- 3 files changed, 5 insertions(+), 45 deletions(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 71e24fc14..dd1829148 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,6 +1,6 @@ minor_changes: - - zos_operator: Added the 'wait' parameter back in to use the new -w operator. + - zos_operator: Changed system to call 'wait=true' parameter to zoau call. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. + - zos_operator_action_query: Add wait_time_s parameter in the operator_action_query. 
(https://github.com/ansible-collections/ibm_zos_core/pull/976) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 35f155e65..ab34aa0cc 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -55,14 +55,6 @@ type: int required: false default: 1 - wait: - description: - - Setting this option will tell the system to wait the full wait_time, instead - of returning on first data received - - This option is only available with zoau 1.2.5 or later - type: bool - required: false - default: false """ EXAMPLES = r""" @@ -83,12 +75,6 @@ zos_operator: cmd: 'd a,all' wait_time_s: 5 - wait: true - -- name: Execute operator command to show jobs, waiting up to 7 seconds for response - zos_operator: - cmd: 'd a,all' - wait_time_s: 7 - name: Display the system symbols and associated substitution texts. zos_operator: @@ -199,7 +185,6 @@ def run_module(): cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), wait_time_s=dict(type="int", required=False, default=1), - wait=dict(type="bool", required=False, default=False), ) result = dict(changed=False) @@ -270,7 +255,6 @@ def parse_params(params): cmd=dict(arg_type="str", required=True), verbose=dict(arg_type="bool", required=False), wait_time_s=dict(arg_type="int", required=False), - wait=dict(arg_type="bool", required=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -299,12 +283,7 @@ def run_operator_command(params): getit = True if getit: - if params.get("wait"): - kwargs.update({"wait_arg": True}) - else: - kwargs.pop("wait_arg", "0") - else: - kwargs.pop("wait_arg", "0") + kwargs.update({"wait_arg": True}) args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 77d130697..877b265e5 100644 --- a/plugins/modules/zos_operator_action_query.py +++ 
b/plugins/modules/zos_operator_action_query.py @@ -63,21 +63,10 @@ - When set to 0, the system default is used. - This option is helpful on a busy system requiring more time to execute commands. - - Setting I(wait) can instruct if execution should wait the - full I(wait_time_s). - Because 2 functions are called, potential time delay is doubled. type: int required: false default: 1 - wait: - description: - - Setting this option will tell the system to wait the full wait_time, instead - of returning on first data received - - Because 2 functions are called, potential time delay is doubled. - - This option is only available with zoau 1.2.5 or later - type: bool - required: false - default: false message_filter: description: - Return outstanding messages requiring operator action awaiting a @@ -123,7 +112,7 @@ job_name: im5* - name: Display all outstanding messages whose job name begin with im7, - wait up to 10 seconds per call (20 seconds overall) for data + waiting 10 seconds per call (20 seconds overall) for data zos_operator_action_query: job_name: im7* wait_time_s: 10 @@ -133,7 +122,6 @@ zos_operator_action_query: job_name: im9* wait_time_s: 15 - wait: True - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: @@ -275,7 +263,6 @@ def run_module(): message_id=dict(type="str", required=False), job_name=dict(type="str", required=False), wait_time_s=dict(type="int", required=False, default=1), - wait=dict(type="bool", required=False, default=False), message_filter=dict( type="dict", required=False, @@ -306,12 +293,7 @@ def run_module(): getit = True if getit: - if new_params.get("wait"): - kwargs.update({"wait_arg": True}) - else: - kwargs.pop("wait_arg", "0") - else: - kwargs.pop("wait_arg", "0") + kwargs.update({"wait_arg": True}) args = [] @@ -366,7 +348,6 @@ def parse_params(params): message_id=dict(arg_type=message_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), 
wait_time_s=dict(arg_type="int", required=False), - wait=dict(arg_type="bool", required=False), message_filter=dict(arg_type=message_filter_type, required=False) ) parser = BetterArgParser(arg_defs) From 9690f487639071576ad31e986ecc1dfafea9a88c Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Sat, 7 Oct 2023 09:49:21 -0600 Subject: [PATCH 199/413] Added latest to allow member copy when disp=shr (#980) * Added latest to allow member copy when disp=shr * Added changelog fragment * Added new force option and test for locked data sets non VSAM * Fixed pep8 issue * Added new option force lock * Modified test case with new option * Added force option * Added doc and warning * Updated changelog fragment * Update 980-zos-copy-disp-shr.yml * Updated changelog fragment * Removed unused comments Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added message * Added force_lock to all CopyHandlers * Modified test case * Changed use of dataset vs data set --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/980-zos-copy-disp-shr.yml | 5 ++ plugins/action/zos_copy.py | 4 ++ plugins/modules/zos_copy.py | 66 +++++++++++++++---- .../functional/modules/test_zos_copy_func.py | 47 +++++++++---- 4 files changed, 99 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/980-zos-copy-disp-shr.yml diff --git a/changelogs/fragments/980-zos-copy-disp-shr.yml b/changelogs/fragments/980-zos-copy-disp-shr.yml new file mode 100644 index 000000000..541e611c1 --- /dev/null +++ b/changelogs/fragments/980-zos-copy-disp-shr.yml @@ -0,0 +1,5 @@ +minor_changes: +- zos_copy - Add new option `force_lock` that can copy into data sets that are + already in use by other processes (DISP=SHR). User needs to use with caution + because this is subject to race conditions and can lead to data loss. + (https://github.com/ansible-collections/ibm_zos_core/pull/980). 
diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index afc454359..6b86d24a3 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -59,6 +59,7 @@ def run(self, tmp=None, task_vars=None): local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) is_binary = _process_boolean(task_args.get('is_binary'), default=False) + force_lock = _process_boolean(task_args.get('force_lock'), default=False) executable = _process_boolean(task_args.get('executable'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) backup_name = task_args.get("backup_name", None) @@ -126,6 +127,9 @@ def run(self, tmp=None, task_vars=None): msg = "Cannot specify 'mode', 'owner' or 'group' for MVS destination" return self._exit_action(result, msg, failed=True) + if force_lock: + display.warning( + msg="Using force_lock uses operations that are subject to race conditions and can lead to data loss, use with caution.") template_dir = None if not remote_src: diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 625e2e6b2..073e11688 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -151,6 +151,22 @@ type: bool default: false required: false + force_lock: + description: + - By default, when c(dest) is a MVS data set and is being used by another + process with DISP=SHR or DISP=OLD the module will fail. Use C(force_lock) + to bypass this check and continue with copy. + - If set to C(true) and destination is a MVS data set opened by another + process then zos_copy will try to copy using DISP=SHR. + - Using C(force_lock) uses operations that are subject to race conditions + and can lead to data loss, use with caution. 
+ - If a data set member has aliases, and is not a program + object, copying that member to a dataset that is in use will result in + the aliases not being preserved in the target dataset. When this scenario + occurs the module will fail. + type: bool + default: false + required: false ignore_sftp_stderr: description: - During data transfer through SFTP, the module fails if the SFTP command @@ -778,7 +794,8 @@ def __init__( module, is_binary=False, executable=False, - backup_name=None + backup_name=None, + force_lock=False, ): """Utility class to handle copying data between two targets @@ -793,11 +810,15 @@ def __init__( is executable backup_name {str} -- The USS path or data set name of destination backup + force_lock {str} -- Whether the dest data set should be copied into + using disp=shr when is opened by another + process. """ self.module = module self.is_binary = is_binary self.executable = executable self.backup_name = backup_name + self.force_lock = force_lock def run_command(self, cmd, **kwargs): """ Wrapper for AnsibleModule.run_command """ @@ -824,10 +845,14 @@ def copy_to_seq( """ new_src = conv_path or temp_path or src copy_args = dict() + copy_args["options"] = "" if self.is_binary: copy_args["options"] = "-B" + if self.force_lock: + copy_args["options"] += " -f" + response = datasets._copy(new_src, dest, None, **copy_args) if response.rc != 0: raise CopyOperationError( @@ -847,10 +872,12 @@ def copy_to_vsam(self, src, dest): src {str} -- The name of the source VSAM dest {str} -- The name of the destination VSAM """ + out_dsp = "shr" if self.force_lock else "old" + dds = {"OUT": "{0},{1}".format(dest.upper(), out_dsp)} repro_cmd = """ REPRO - INDATASET('{0}') - - OUTDATASET('{1}')""".format(src.upper(), dest.upper()) - rc, out, err = idcams(repro_cmd, authorized=True) + OUTFILE(OUT)""".format(src.upper()) + rc, out, err = idcams(repro_cmd, dds=dds, authorized=True) if rc != 0: raise CopyOperationError( msg=("IDCAMS REPRO encountered a problem while " 
@@ -1404,7 +1431,8 @@ def __init__( module, is_binary=False, executable=False, - backup_name=None + backup_name=None, + force_lock=False, ): """ Utility class to handle copying to partitioned data sets or partitioned data set members. @@ -1422,7 +1450,8 @@ def __init__( module, is_binary=is_binary, executable=executable, - backup_name=backup_name + backup_name=backup_name, + force_lock=force_lock, ) def copy_to_pdse( @@ -1543,6 +1572,7 @@ def copy_to_member( src = src.replace("$", "\\$") dest = dest.replace("$", "\\$").upper() opts = dict() + opts["options"] = "" if self.is_binary: opts["options"] = "-B" @@ -1550,6 +1580,9 @@ def copy_to_member( if self.executable: opts["options"] = "-IX" + if self.force_lock: + opts["options"] += " -f" + response = datasets._copy(src, dest, None, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response @@ -2234,7 +2267,7 @@ def data_set_locked(dataset_name): dataset_name (str) - the data set name used to check if there is a lock. Returns: - bool -- rue if the data set is locked, or False if the data set is not locked. + bool -- True if the data set is locked, or False if the data set is not locked. """ # Using operator command "D GRS,RES=(*,{dataset_name})" to detect if a data set # is in use, when a data set is in use it will have "EXC/SHR and SHARE" @@ -2294,6 +2327,7 @@ def run_module(module, arg_def): copy_member = module.params.get('copy_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') + force_lock = module.params.get('force_lock') dest_data_set = module.params.get('dest_data_set') if dest_data_set: @@ -2472,10 +2506,11 @@ def run_module(module, arg_def): # for try to write in dest and if both src and dest are in lock. 
# ******************************************************************** if dest_ds_type != "USS": - is_dest_lock = data_set_locked(dest_name) - if is_dest_lock: - module.fail_json( - msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + if not force_lock: + is_dest_lock = data_set_locked(dest_name) + if is_dest_lock: + module.fail_json( + msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) # ******************************************************************** # Backup should only be performed if dest is an existing file or # data set. Otherwise ignored. @@ -2590,7 +2625,8 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, - backup_name=backup_name + backup_name=backup_name, + force_lock=force_lock, ) try: @@ -2672,7 +2708,11 @@ def run_module(module, arg_def): temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( - module, is_binary=is_binary, executable=executable, backup_name=backup_name + module, + is_binary=is_binary, + executable=executable, + backup_name=backup_name, + force_lock=force_lock, ) pdse_copy_handler.copy_to_pdse( @@ -2808,6 +2848,7 @@ def main(): src_member=dict(type='bool'), local_charset=dict(type='str'), force=dict(type='bool', default=False), + force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), tmp_hlq=dict(type='str', required=False, default=None), ), @@ -2827,6 +2868,7 @@ def main(): checksum=dict(arg_type='str', required=False), validate=dict(arg_type='bool', required=False), volume=dict(arg_type='str', required=False), + force_lock=dict(type='bool', default=False), dest_data_set=dict( arg_type='dict', diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 2bcf59a21..9c8aa9f9b 100644 --- 
a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -185,7 +185,7 @@ call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' +SH /tmp/disp_shr/pdse-lock '{0}' //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -1565,22 +1565,30 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -def test_copy_dest_lock(ansible_zos_module): +@pytest.mark.parametrize("ds_type", ["PDS", "PDSE", "SEQ"]) +def test_copy_dest_lock(ansible_zos_module, ds_type): DATASET_1 = "USER.PRIVATE.TESTDS" DATASET_2 = "ADMI.PRIVATE.TESTDS" MEMBER_1 = "MEM1" + if ds_type == "PDS" or ds_type == "PDSE": + src_data_set = DATASET_1 + "({0})".format(MEMBER_1) + dest_data_set = DATASET_2 + "({0})".format(MEMBER_1) + else: + src_data_set = DATASET_1 + dest_data_set = DATASET_2 try: hosts = ansible_zos_module hosts.all.zos_data_set(name=DATASET_1, state="present", type="pdse", replace=True) hosts.all.zos_data_set(name=DATASET_2, state="present", type="pdse", replace=True) - hosts.all.zos_data_set(name=DATASET_1 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) - hosts.all.zos_data_set(name=DATASET_2 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) + if ds_type == "PDS" or ds_type == "PDSE": + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) # copy text_in source - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(DUMMY_DATA, DATASET_2+"({0})".format(MEMBER_1))) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) 
hosts.all.zos_copy( - content=call_c_jcl.format(DATASET_1, MEMBER_1), + content=call_c_jcl.format(dest_data_set), dest='/tmp/disp_shr/call_c_pgm.jcl', force=True ) @@ -1590,14 +1598,31 @@ def test_copy_dest_lock(ansible_zos_module): # pause to ensure c code acquires lock time.sleep(5) results = hosts.all.zos_copy( - src = DATASET_2 + "({0})".format(MEMBER_1), - dest = DATASET_1 + "({0})".format(MEMBER_1), - remote_src = True + src = src_data_set, + dest = dest_data_set, + remote_src = True, + force=True, + force_lock=True, ) for result in results.contacted.values(): print(result) - assert result.get("changed") == False - assert result.get("msg") is not None + assert result.get("changed") == True + assert result.get("msg") is None + # verify that the content is the same + verify_copy = hosts.all.shell( + cmd="dcat \"{0}\"".format(dest_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result in verify_copy.contacted.values(): + print(vp_result) + verify_copy_2 = hosts.all.shell( + cmd="dcat \"{0}\"".format(src_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result_2 in verify_copy_2.contacted.values(): + print(vp_result_2) + assert vp_result_2.get("stdout") == vp_result.get("stdout") + finally: # extract pid ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") From 60250ee2065a361b2a491f2fc98157f473931e53 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 9 Oct 2023 14:16:49 -0400 Subject: [PATCH 200/413] Changed operator_action_query to wait=false time = 5 Renamed vague variable name to "use_wait_arg" Reflected changes and 1.2.5 dependancy in the changelog fragment --- ...nhance-Add-wait-zos-operator-and-query.yml | 4 +- plugins/modules/zos_operator.py | 10 ++--- plugins/modules/zos_operator_action_query.py | 40 +++++-------------- 3 files changed, 17 insertions(+), 37 deletions(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml 
b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index dd1829148..5a8202c34 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,6 +1,8 @@ minor_changes: - zos_operator: Changed system to call 'wait=true' parameter to zoau call. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query: Add wait_time_s parameter in the operator_action_query. + - zos_operator_action_query: Add a max delay of 5 seconds on each part of the operator_action_query. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index ab34aa0cc..2d1fb807f 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -274,15 +274,15 @@ def run_operator_command(params): cmdtxt = params.get("cmd") zv = ZOAU_API_VERSION.split(".") - getit = False + use_wait_arg = False if zv[0] > "1": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] > "2": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": - getit = True + use_wait_arg = True - if getit: + if use_wait_arg: kwargs.update({"wait_arg": True}) args = [] diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 877b265e5..ddf895eb9 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -29,6 +29,8 @@ - "Ping Xiao (@xiaoping8385)" - "Demetrios Dimatos (@ddimatos)" - "Ivan Moreno (@rexemin)" + - "Rich Parker (@richp405)" + options: system: description: @@ -57,16 +59,6 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false - wait_time_s: - description: - - Set maximum time in seconds to wait for the commands to execute. 
- - When set to 0, the system default is used. - - This option is helpful on a busy system requiring more time to execute - commands. - - Because 2 functions are called, potential time delay is doubled. - type: int - required: false - default: 1 message_filter: description: - Return outstanding messages requiring operator action awaiting a @@ -111,18 +103,6 @@ zos_operator_action_query: job_name: im5* -- name: Display all outstanding messages whose job name begin with im7, - waiting 10 seconds per call (20 seconds overall) for data - zos_operator_action_query: - job_name: im7* - wait_time_s: 10 - -- name: Display all outstanding messages whose job name begin with im9, - wait a full 15 seconds per call (30 seconds overall) for data - zos_operator_action_query: - job_name: im9* - wait_time_s: 15 - - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: message_id: dsi* @@ -262,7 +242,6 @@ def run_module(): system=dict(type="str", required=False), message_id=dict(type="str", required=False), job_name=dict(type="str", required=False), - wait_time_s=dict(type="int", required=False, default=1), message_filter=dict( type="dict", required=False, @@ -281,19 +260,19 @@ def run_module(): kwargs = {} - wait_s = new_params.get("wait_time_s") + wait_s = 5 zv = ZOAU_API_VERSION.split(".") - getit = False + use_wait_arg = False if zv[0] > "1": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] > "2": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": - getit = True + use_wait_arg = True - if getit: - kwargs.update({"wait_arg": True}) + if use_wait_arg: + kwargs.update({"wait_arg": False}) args = [] @@ -347,7 +326,6 @@ def parse_params(params): system=dict(arg_type=system_type, required=False), message_id=dict(arg_type=message_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), - wait_time_s=dict(arg_type="int", required=False), 
message_filter=dict(arg_type=message_filter_type, required=False) ) parser = BetterArgParser(arg_defs) From 4377ac2eadf269706c259e1eea013b5ea6554314 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Wed, 11 Oct 2023 14:01:34 -0700 Subject: [PATCH 201/413] Enhancement/423/zos copy add data set member alias support (#1014) * add aliases option and enable text-based member copy w alias to an existing pds Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * disable alias included in data set member listing when collecting src members from pds Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch 'alias' option introduced in zoau1.2.5 to '-H' flag available in zoau1.2.4. also enable alias copying of executables Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * implement aliases for copy to/from USS, add guard rail for non-executable copy to USS with aliases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add exception handler for executable PDS copy, handle non-existent library pds for executable USS src, add error message for PDS copy attempt to USS file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up init functions, break up long lines Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor executable member to member copy for alias work, this commit refactors some helpers which break a select few loadlib tests, but those will be refactored in upcoming comimts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor and expand test_copy_pds_loadlib_member_to_uss test case to copy to a new loadlib Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add aliases error raised check to text-based pds member copy to uss Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add aliases error raised check to text-based pds member copy to uss Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * parametrize cobol program to pass in custom output string, create helper method around running and validating loadlib pgms, refactor executable tests to use 
helper method, add helper method to create loadlib w multiple members, add test case for loadlib to loadlib copy w and w/o aliases. Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add test case for copying entire loadlib to uss dir and then to another loadlib. refactor other loadlib test case to reduce loc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new pytest markers for aliases and loadlib test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in a sneak preview version of bug #920 addressed in PR #968 which adds LIBRARY as a valid value to the dest_data_set option Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * alter placement of aliases option to go after executable options Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add docs and examples for aliases option Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * minor tweaks to doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * addres santiy check issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * missed a sanity check issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve remaining merge conflicts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 style issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup spacing issue in examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add updated rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add comments for explaning logic/code flow around full pds copy Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...4-zos-copy-add-data-set-member-aliases.yml | 5 + docs/source/modules/zos_copy.rst | 22 +- plugins/modules/zos_copy.py | 107 ++- .../functional/modules/test_zos_copy_func.py | 766 ++++++++++++++++-- tests/pytest.ini | 4 +- 5 files changed, 805 insertions(+), 99 deletions(-) create mode 100644 
changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml diff --git a/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml b/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml new file mode 100644 index 000000000..4122ea878 --- /dev/null +++ b/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml @@ -0,0 +1,5 @@ +minor_changes: +- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases + when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. + Copying aliases of text based members to/from USS is not supported. + (https://github.com/ansible-collections/ibm_zos_core/pull/1014) \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 71cd094fc..191570bae 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -171,6 +171,17 @@ executable | **type**: bool +aliases + If set to ``true``, indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + + Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when ``executable=True`` and ``dest`` is a USS file or directory, this option will be ignored. + + Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. + + | **required**: False + | **type**: bool + + local_follow This flag indicates that any existing filesystem links in the source tree should be followed. @@ -687,12 +698,21 @@ Examples record_format: VB record_length: 150 - - name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. 
+ - name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL zos_copy: src: HLQ.COBOLSRC.PDSE(TESTPGM) dest: HLQ.NEW.PDSE(MYCOBOL) remote_src: true executable: true + aliases: true + + - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE + zos_copy: + src: '/home/loadlib/' + dest: HLQ.LOADLIB.NEW + remote_src: true + executable: true + aliases: true diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 10b35ea22..c671d87a0 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -203,6 +203,16 @@ type: bool default: false required: false + aliases: + description: + - If set to C(true), indicates that any aliases found in the source + (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + - Aliases are implicitly preserved when libraries are copied over to USS destinations. + That is, when C(executable=True) and C(dest) is a USS file or directory, this option will be ignored. + - Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. + type: bool + default: false + required: false local_follow: description: - This flag indicates that any existing filesystem links in the source tree @@ -591,12 +601,21 @@ record_format: VB record_length: 150 -- name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. 
+- name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL zos_copy: src: HLQ.COBOLSRC.PDSE(TESTPGM) dest: HLQ.NEW.PDSE(MYCOBOL) remote_src: true executable: true + aliases: true + +- name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE + zos_copy: + src: '/home/loadlib/' + dest: HLQ.LOADLIB.NEW + remote_src: true + executable: true + aliases: true """ RETURN = r""" @@ -795,6 +814,7 @@ def __init__( module, is_binary=False, executable=False, + aliases=False, backup_name=None, force_lock=False, ): @@ -818,6 +838,7 @@ def __init__( self.module = module self.is_binary = is_binary self.executable = executable + self.aliases = aliases self.backup_name = backup_name self.force_lock = force_lock @@ -1097,6 +1118,7 @@ def __init__( module, is_binary=False, executable=False, + aliases=False, common_file_args=None, backup_name=None, ): @@ -1114,7 +1136,7 @@ def __init__( backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( - module, is_binary=is_binary, executable=executable, backup_name=backup_name + module, is_binary=is_binary, executable=executable, aliases=aliases, backup_name=backup_name ) self.common_file_args = common_file_args @@ -1149,6 +1171,7 @@ def copy_to_uss( self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) + if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) @@ -1393,6 +1416,7 @@ def _mvs_copy_to_uss( Keyword Arguments: member_name {str} -- The name of the source data set member """ + if os.path.isdir(dest): # If source is a data set member, destination file should have # the same name as the member. 
@@ -1403,9 +1427,10 @@ def _mvs_copy_to_uss( os.mkdir(dest) except FileExistsError: pass + opts = dict() if self.executable: - opts["options"] = "-IX" + opts["options"] = "-IX " try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: @@ -1421,7 +1446,17 @@ def _mvs_copy_to_uss( stderr=response.stderr_response ) else: - copy.copy_pds2uss(src, dest, is_binary=self.is_binary) + if self.executable: + response = datasets._copy(src, dest, None, **opts) + if response.rc != 0: + raise CopyOperationError( + msg="Error while copying source {0} to {1}".format(src, dest), + rc=response.rc, + stdout=response.stdout_response, + stderr=response.stderr_response + ) + else: + copy.copy_pds2uss(src, dest, is_binary=self.is_binary) except Exception as err: raise CopyOperationError(msg=str(err)) @@ -1432,6 +1467,7 @@ def __init__( module, is_binary=False, executable=False, + aliases=False, backup_name=None, force_lock=False, ): @@ -1451,6 +1487,7 @@ def __init__( module, is_binary=is_binary, executable=executable, + aliases=aliases, backup_name=backup_name, force_lock=force_lock, ) @@ -1516,7 +1553,13 @@ def copy_to_pdse( if src_member: members.append(data_set.extract_member_name(new_src)) else: - members = datasets.list_members(new_src) + # The 'members' variable below is used to store a list of members in the src PDS/E. + # Items in the list are passed to the copy_to_member function. + # Aliases are included in the output by list_members unless the alias option is disabled. + # The logic for preserving/copying aliases is contained in the copy_to_member function. 
+ opts = {} + opts['options'] = '-H ' # mls option to hide aliases + members = datasets.list_members(new_src, **opts) src_members = ["{0}({1})".format(src_data_set_name, member) for member in members] dest_members = [ @@ -1525,7 +1568,7 @@ def copy_to_pdse( for member in members ] - existing_members = datasets.list_members(dest) + existing_members = datasets.list_members(dest) # fyi - this list includes aliases overwritten_members = [] new_members = [] @@ -1578,8 +1621,14 @@ def copy_to_member( if self.is_binary: opts["options"] = "-B" + if self.aliases and not self.executable: + # lower case 'i' for text-based copy (dcp) + opts["options"] = "-i" + if self.executable: - opts["options"] = "-IX" + opts["options"] = "-X" + if self.aliases: + opts["options"] = "-IX" if self.force_lock: opts["options"] += " -f" @@ -1817,6 +1866,7 @@ def is_compatible( Returns: {bool} -- Whether src can be copied to dest. """ + # ******************************************************************** # If the destination does not exist, then obviously it will need # to be created. As a result, target is compatible. @@ -2194,7 +2244,17 @@ def allocate_destination_data_set( # TODO: decide on whether to compute the longest file record length and use that for the whole PDSE. size = sum(os.stat("{0}/{1}".format(src, member)).st_size for member in os.listdir(src)) # This PDSE will be created with record format VB and a record length of 1028. 
- dest_params = get_data_set_attributes(dest, size, is_binary, type="PDSE", volume=volume) + + if executable: + dest_params = get_data_set_attributes( + dest, size, is_binary, + record_format='U', + record_length=0, + type="LIBRARY", + volume=volume + ) + else: + dest_params = get_data_set_attributes(dest, size, is_binary, type="PDSE", volume=volume) data_set.DataSet.ensure_present(replace=force, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: @@ -2311,6 +2371,7 @@ def run_module(module, arg_def): remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') executable = module.params.get('executable') + aliases = module.params.get('aliases') backup = module.params.get('backup') backup_name = module.params.get('backup_name') validate = module.params.get('validate') @@ -2502,7 +2563,7 @@ def run_module(module, arg_def): ) # ******************************************************************** - # To validate the source and dest are not lock in a batch process by + # To validate the source and dest are not locked in a batch process by # the machine and not generate a false positive check the disposition # for try to write in dest and if both src and dest are in lock. # ******************************************************************** @@ -2512,6 +2573,29 @@ def run_module(module, arg_def): if is_dest_lock: module.fail_json( msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + + # ******************************************************************** + # Alias support is not avaiable to and from USS for text-based data sets. + # ******************************************************************** + if aliases: + if (src_ds_type == 'USS' or dest_ds_type == 'USS') and not executable: + module.fail_json( + msg="Alias support for text-based data sets is not available " + + "for USS sources (src) or targets (dest). " + + "Try setting executable=True or aliases=False." 
+ ) + + # ******************************************************************** + # Attempt to write PDS (not member) to USS file (i.e. a non-directory) + # ******************************************************************** + if ( + src_ds_type in data_set.DataSet.MVS_PARTITIONED and not src_member + and dest_ds_type == 'USS' and not os.path.isdir(dest) + ): + module.fail_json( + msg="Cannot write a partitioned data set (PDS) to a USS file." + ) + # ******************************************************************** # Backup should only be performed if dest is an existing file or # data set. Otherwise ignored. @@ -2523,6 +2607,7 @@ def run_module(module, arg_def): res_args["note"] = "Destination is empty, backup request ignored" else: backup_name = backup_data(dest, dest_ds_type, backup_name, tmphlq) + # ******************************************************************** # If destination does not exist, it must be created. To determine # what type of data set destination must be, a couple of simple checks @@ -2646,6 +2731,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, ) @@ -2712,6 +2798,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + aliases=aliases, backup_name=backup_name, force_lock=force_lock, ) @@ -2759,6 +2846,7 @@ def main(): dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), + aliases=dict(type='bool', default=False, required=False), encoding=dict( type='dict', required=False, @@ -2861,6 +2949,7 @@ def main(): dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), executable=dict(arg_type='bool', required=False, default=False), + aliases=dict(arg_type='bool', required=False, default=False), content=dict(arg_type='str', 
required=False), backup=dict(arg_type='bool', default=False, required=False), backup_name=dict(arg_type='data_set_or_path', required=False), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 9c8aa9f9b..1fa6397e2 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -97,21 +97,33 @@ TEST_PDSE = "SYS1.NFSLIBE" TEST_PDSE_MEMBER = "SYS1.NFSLIBE(GFSAMAIN)" +COBOL_PRINT_STR = "HELLO WORLD ONE" +COBOL_PRINT_STR2 = "HELLO WORLD TWO" + COBOL_SRC = """ IDENTIFICATION DIVISION.\n PROGRAM-ID. HELLOWRD.\n \n PROCEDURE DIVISION.\n - DISPLAY "SIMPLE HELLO WORLD".\n + DISPLAY "{0}".\n STOP RUN.\n """ + + + +# format params for LINK_JCL: +# {0} - cobol src pds dsn +# {1} - cobol src pds member +# {2} - candidate loadlib dsn +# {3} - candidate loadlib member +# {4} - alias member name LINK_JCL = """ //COMPLINK JOB MSGCLASS=H,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //STEP1 EXEC PGM=IGYCRCTL //STEPLIB DD DSN=IGYV5R10.SIGYCOMP,DISP=SHR // DD DSN=IGYV5R10.SIGYMAC,DISP=SHR -//SYSIN DD DISP=SHR,DSN={0} +//SYSIN DD DISP=SHR,DSN={0}({1}) //SYSPRINT DD SYSOUT=* //SYSLIN DD UNIT=SYSDA,DISP=(MOD), // SPACE=(CYL,(1,1)), @@ -138,15 +150,18 @@ //SYSPRINT DD SYSOUT=* //SYSLIB DD DSN=CEE.SCEELKED,DISP=SHR // DD DSN=CEE.SCEELKEX,DISP=SHR -//SYSLMOD DD DSN={1}, +//SYSLMOD DD DSN={2}({3}), // DISP=SHR //SYSUT1 DD UNIT=SYSDA,DCB=BLKSIZE=1024, // SPACE=(TRK,(3,3)) //SYSTERM DD SYSOUT=* //SYSPRINT DD SYSOUT=* //SYSLIN DD DSN=&&LOADSET,DISP=(OLD,KEEP) -//SYSIN DD DUMMY +// DD * + ALIAS {4} + NAME {3} //* +//SYSIN DD DUMMY """ @@ -295,54 +310,87 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.file(path=record_src, state="absent") -def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): +def validate_loadlib_pgm(hosts, steplib, pgm_name, expected_output_str): + + mvscmd_str = "mvscmd --steplib='{0}' --pgm='{1}' --sysout='*' 
--sysprint='*'" + verify_copy_exec_pgm = hosts.all.shell( + cmd=mvscmd_str.format(steplib, pgm_name) + ) + + for v_cp_pgm in verify_copy_exec_pgm.contacted.values(): + assert v_cp_pgm.get("rc") == 0 + assert v_cp_pgm.get("stdout").strip() == expected_output_str + + +def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem='ALIAS1'): """ - Given a PDSE, links a cobol program making allocated in a temp ds resulting in ds_name - as a loadlib. + Given a PDSE, links a cobol program (allocated in a temp ds) resulting in a loadlib. Arguments: - ds_name (str) -- PDS/E to be linked with the cobol program. - cobol_src (str) -- Cobol source code to be used as the program. - - Notes: PDS names are in the format of SOME.PDSNAME(MEMBER) + cobol_src_pds (str) - cobol src pds dsn containing members containing cobol src code. + cobol_src_mem (str) - cobol src pds member containing cobol src code. + loadlib_pds (str) - candidate loadlib dsn + loadlib_mem (str) - candidate loadlib member + loadlib_alias_mem (str) - alias member name """ - # Copy the Link program - temp_jcl = "/tmp/link.jcl" + temp_jcl_uss_path = "/tmp/link.jcl" rc = 0 try: + # Copy over the Link program to USS cp_res = hosts.all.zos_copy( - content=LINK_JCL.format(cobol_pds, ds_name), - dest="/tmp/link.jcl", + content=LINK_JCL.format(cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem), + dest=temp_jcl_uss_path, force=True, ) - # Link the temp ds with ds_name + # Submit link JCL. 
job_result = hosts.all.zos_job_submit( src="/tmp/link.jcl", location="USS", wait_time_s=60 ) for result in job_result.contacted.values(): - #print("link job submit result {0}".format(result)) rc = result.get("jobs")[0].get("ret_code").get("code") finally: - hosts.all.file(path=temp_jcl, state="absent") + hosts.all.file(path=temp_jcl_uss_path, state="absent") return rc -def generate_executable_ds(hosts, src, dest, cobol): - member = "HELLOSRC" - hosts.all.zos_copy(content=COBOL_SRC, dest=cobol) - dest_name = "{0}({1})".format(dest, member) - src_name = "{0}({1})".format(src, member) - rc = link_loadlib_from_cobol(hosts, dest_name, cobol) - assert rc == 0 - cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" - hosts.all.shell(cmd=cmd.format(member, dest)) - rc = link_loadlib_from_cobol(hosts, src_name, cobol) - hosts.all.shell(cmd=cmd.format(member, src)) - assert rc == 0 - exec_res = hosts.all.shell(cmd=cmd.format(member, src)) - for result in exec_res.contacted.values(): - assert result.get("rc") == 0 + +def generate_executable_ds(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem="ALIAS1"): + + # copy COBOL src string to pds. + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mem)) + + # run link-edit to create loadlib. 
+ link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem) + assert link_rc == 0 + + # execute pgm to test loadlib + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mem, expected_output_str=COBOL_PRINT_STR) + + +def generate_loadlib(hosts, cobol_src_pds, cobol_src_mems, loadlib_pds, loadlib_mems, loadlib_alias_mems): + # copy cobol src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mems[0])) + # copy cobol2 src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR2), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mems[1])) + + # run link-edit for pgm1 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mems[0], loadlib_pds, loadlib_mems[0], loadlib_alias_mems[0]) + assert link_rc == 0 + # run link-edit for pgm2 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mems[1], loadlib_pds, loadlib_mems[1], loadlib_alias_mems[1]) + assert link_rc == 0 + + # execute pgm to test pgm1 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_mems[0], expected_output_str=COBOL_PRINT_STR) + # execute pgm to test alias of pgm1 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mems[0], expected_output_str=COBOL_PRINT_STR) + # execute pgm to test pgm2 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_mems[1], expected_output_str=COBOL_PRINT_STR2) + # execute pgm to test alias of pgm2 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mems[1], expected_output_str=COBOL_PRINT_STR2) + def generate_executable_uss(hosts, src, src_jcl_call): hosts.all.zos_copy(content=hello_world, dest=src, force=True) @@ -355,6 +403,7 @@ def generate_executable_uss(hosts, src, src_jcl_call): stdout = res.get("stdout") assert "Hello 
World" in str(stdout) + @pytest.mark.uss @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), @@ -2558,18 +2607,34 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): @pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases @pytest.mark.parametrize("is_created", ["true", "false"]) def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_created): hosts = ansible_zos_module - # The volume for this dataset should use a system symbol. # This dataset and member should be available on any z/OS system. - src = "USER.LOAD.SRC" - dest = "USER.LOAD.DEST" - cobol_pds = "USER.COBOL.SRC" - dest_exe = "USER.LOAD.EXE" + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem = "HELLO" + pgm_mem_alias = "ALIAS1" try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib hosts.all.zos_data_set( - name=src, + name=src_lib, state="present", type="pdse", record_format="U", @@ -2579,8 +2644,124 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr space_type="M", replace=True ) + + # generate loadlib into src_pds + generate_executable_ds(hosts, cobol_src_pds, cobol_src_mem, src_lib, pgm_mem, pgm_mem_alias) + + # tests existent/non-existent destination data set code path. + if not is_created: + # ensure dest data sets NOT present + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + else: + # pre-allocate dest loadlib to copy over without an alias. 
+ hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + # pre-allocate dest loadlib to copy over with an alias. + hosts.all.zos_data_set( + name=dest_lib_aliases, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # zos_copy w an executable: + copy_res = hosts.all.zos_copy( + src="{0}({1})".format(src_lib, pgm_mem), + dest="{0}({1})".format(dest_lib, pgm_mem), + remote_src=True, + executable=True, + aliases=False + ) + # zos_copy w an executables and its alias: + copy_res_aliases = hosts.all.zos_copy( + src="{0}({1})".format(src_lib, pgm_mem), + dest="{0}({1})".format(dest_lib_aliases, pgm_mem), + remote_src=True, + executable=True, + aliases=True + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + assert mls_alias_str not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} 
ALIAS({1})".format(pgm_mem, pgm_mem_alias) + assert expected_mls_str in stdout + + # execute pgms to validate copy + validate_loadlib_pgm(hosts, steplib=dest_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib_aliases, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib_aliases, pgm_name=pgm_mem_alias, expected_output_str=COBOL_PRINT_STR) + + finally: + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + +@pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases +@pytest.mark.uss +def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): + hosts = ansible_zos_module + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + pgm_mem = "HELLO" + + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem_alias = "ALIAS1" + + uss_dest = "/tmp/HELLO" + try: + # allocate data sets hosts.all.zos_data_set( - name=dest, + name=src_lib, state="present", type="pdse", record_format="U", @@ -2591,7 +2772,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr replace=True ) hosts.all.zos_data_set( - name=cobol_pds, + name=cobol_src_pds, state="present", type="pds", space_primary=2, @@ -2600,12 +2781,190 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr block_size=3120, replace=True, ) - member = "HELLOSRC" - cobol_pds = "{0}({1})".format(cobol_pds, member) - generate_executable_ds(hosts, src, dest, cobol_pds) - if is_created: + hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + hosts.all.zos_data_set( + 
name=dest_lib_aliases, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # generate loadlib into src_pds + generate_executable_ds(hosts, cobol_src_pds, cobol_src_mem, src_lib, pgm_mem, pgm_mem_alias) + + # zos_copy an executable to USS file: + copy_uss_res = hosts.all.zos_copy( + src="{0}({1})".format(src_lib, pgm_mem), + dest=uss_dest, + remote_src=True, + executable=True, + force=True) + for result in copy_uss_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + # run executable on USS + verify_exe_uss = hosts.all.shell( + cmd="{0}".format(uss_dest) + ) + for v_cp_u in verify_exe_uss.contacted.values(): + assert v_cp_u.get("rc") == 0 + assert COBOL_PRINT_STR == v_cp_u.get("stdout").strip() + + + # zos_copy from USS file w an executable: + copy_res = hosts.all.zos_copy( + src="{0}".format(uss_dest), + dest="{0}({1})".format(dest_lib, pgm_mem), + remote_src=True, + executable=True, + aliases=False + ) + # zos_copy from USS file w an executables and its alias: + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(uss_dest), + dest="{0}({1})".format(dest_lib_aliases, pgm_mem), + remote_src=True, + executable=True, + aliases=True + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in 
verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + assert mls_alias_str not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + assert expected_mls_str in stdout + + # execute pgms to validate copy + validate_loadlib_pgm(hosts, steplib=dest_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib_aliases, pgm_name=pgm_mem_alias, expected_output_str=COBOL_PRINT_STR) + + finally: + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + hosts.all.file(name=uss_dest, state="absent") + + +@pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases +@pytest.mark.parametrize("is_created", ["false", "true"]) +def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): + + hosts = ansible_zos_module + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + pgm_mem_alias = "ALIAS1" + pgm2_mem_alias = "ALIAS2" + + + try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib + hosts.all.zos_data_set( + name=src_lib, + state="present", + type="pdse", + 
record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # generate loadlib w 2 members w 1 alias each + generate_loadlib( + hosts=hosts, + cobol_src_pds=cobol_src_pds, + cobol_src_mems=[cobol_src_mem, cobol_src_mem2], + loadlib_pds=src_lib, + loadlib_mems=[pgm_mem, pgm2_mem], + loadlib_alias_mems=[pgm_mem_alias, pgm2_mem_alias] + ) + + if not is_created: + # ensure dest data sets absent for this variation of the test case. + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + else: + # allocate dest loadlib to copy over without an alias. hosts.all.zos_data_set( - name=dest_exe, + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + # allocate dest loadlib to copy over with an alias. + hosts.all.zos_data_set( + name=dest_lib_aliases, state="present", type="pdse", record_format="U", @@ -2615,42 +2974,153 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr space_type="M", replace=True ) - copy_res = hosts.all.zos_copy( - src="{0}({1})".format(src, member), - dest="{0}({1})".format(dest_exe, "MEM1"), - remote_src=True, - executable=True) - verify_copy = hosts.all.shell( - cmd="mls {0}".format(dest_exe), - executable=SHELL_EXECUTABLE - ) + if not is_created: + # dest data set does not exist, specify it in dest_dataset param. 
+ # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False, + dest_data_set={ + 'type': "LIBRARY", + 'record_format': "U", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "M", + } + ) + # copy src loadlib to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True, + dest_data_set={ + 'type': "LIBRARY", + 'record_format': "U", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "M", + } + ) + + else: + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False + ) + # copy src loadlib to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True + ) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - assert result.get("dest") == "{0}({1})".format(dest_exe, "MEM1") + assert result.get("dest") == "{0}".format(dest_lib) - for v_cp in verify_copy.contacted.values(): + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib_aliases) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): assert v_cp.get("rc") == 0 stdout = 
v_cp.get("stdout") assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + mls_alias_str2 = "ALIAS({0})".format(pgm2_mem_alias) + assert mls_alias_str not in stdout + assert mls_alias_str2 not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + expected_mls_str2 = "{0} ALIAS({1})".format(pgm2_mem, pgm2_mem_alias) + assert expected_mls_str in stdout + assert expected_mls_str2 in stdout + + # verify pgms remain executable + pgm_output_map = { + (dest_lib, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem_alias, COBOL_PRINT_STR), + (dest_lib, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem_alias, COBOL_PRINT_STR2) + } + for steplib, pgm, output in pgm_output_map: + validate_loadlib_pgm(hosts, steplib=steplib, pgm_name=pgm, expected_output_str=output) + finally: - hosts.all.zos_data_set(name=dest, state="absent") - hosts.all.zos_data_set(name=src, state="absent") - hosts.all.zos_data_set(name=cobol_pds, state="absent") + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") @pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases @pytest.mark.uss -def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): +def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): + hosts = ansible_zos_module - src = "USER.LOAD.SRC" - dest = "USER.LOAD.DEST" - cobol_pds = "USER.COBOL.SRC" - uss_dest = "/tmp/HELLO" + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + dest_lib_aliases 
= "USER.LOAD.DEST.ALIASES" + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + pgm_mem_alias = "ALIAS1" + pgm2_mem_alias = "ALIAS2" + + # note - aliases for executables are implicitly copied over (by module design) for USS targets. + uss_dir_path = '/tmp/uss-loadlib/' + try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib hosts.all.zos_data_set( - name=src, + name=src_lib, state="present", type="pdse", record_format="U", @@ -2660,8 +3130,22 @@ def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): space_type="M", replace=True ) + + # generate loadlib w 2 members w 1 alias each + generate_loadlib( + hosts=hosts, + cobol_src_pds=cobol_src_pds, + cobol_src_mems=[cobol_src_mem, cobol_src_mem2], + loadlib_pds=src_lib, + loadlib_mems=[pgm_mem, pgm2_mem], + loadlib_alias_mems=[pgm_mem_alias, pgm2_mem_alias] + ) + + # make dest USS dir + hosts.all.file(path=uss_dir_path, state="directory") + # allocate dest loadlib to copy over without an alias. hosts.all.zos_data_set( - name=dest, + name=dest_lib, state="present", type="pdse", record_format="U", @@ -2671,41 +3155,129 @@ def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): space_type="M", replace=True ) + # allocate dest loadlib to copy over with an alias. 
hosts.all.zos_data_set( - name=cobol_pds, + name=dest_lib_aliases, state="present", - type="pds", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, space_primary=2, - record_format="FB", - record_length=80, - block_size=3120, - replace=True, + space_type="M", + replace=True ) - member = "HELLOSRC" - cobol_pds = "{0}({1})".format(cobol_pds, member) - generate_executable_ds(hosts, src, dest, cobol_pds) - copy_uss_res = hosts.all.zos_copy( - src="{0}({1})".format(src, member), - dest=uss_dest, + + # copy src lib to USS dir + copy_res_uss = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(uss_dir_path), remote_src=True, executable=True, - force=True) - for result in copy_uss_res.contacted.values(): + ) + for result in copy_res_uss.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") == "{0}".format(uss_dir_path) + + # inspect USS dir contents + verify_exe_uss_ls = hosts.all.shell( + cmd='ls {0}/{1}'.format(uss_dir_path, src_lib.upper()) + ) + for v_exe_u_ls in verify_exe_uss_ls.contacted.values(): + assert v_exe_u_ls.get("rc") == 0 + assert "{0}\n{1}".format(src_lib.upper(), pgm_mem) + # run executables on USS verify_exe_uss = hosts.all.shell( - cmd="{0}".format(uss_dest) + cmd="{0}/{1}/{2}".format(uss_dir_path, src_lib.upper(), pgm_mem.lower()) ) for v_cp_u in verify_exe_uss.contacted.values(): assert v_cp_u.get("rc") == 0 - stdout = v_cp_u.get("stdout") - assert "SIMPLE HELLO WORLD" in str(stdout) + assert v_cp_u.get("stdout").strip() == COBOL_PRINT_STR + + verify_exe_uss = hosts.all.shell( + cmd="{0}/{1}/{2}".format(uss_dir_path, src_lib.upper(), pgm2_mem.lower()) + ) + for v_cp_u in verify_exe_uss.contacted.values(): + assert v_cp_u.get("rc") == 0 + assert v_cp_u.get("stdout").strip() == COBOL_PRINT_STR2 + + + # copy USS dir to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}/{1}".format(uss_dir_path, src_lib.upper()), + 
dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False + ) + # copy USS dir to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}{1}".format(uss_dir_path, src_lib.upper()), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib) + + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib_aliases) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + mls_alias_str2 = "ALIAS({0})".format(pgm2_mem_alias) + assert mls_alias_str not in stdout + assert mls_alias_str2 not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + expected_mls_str2 = "{0} ALIAS({1})".format(pgm2_mem, pgm2_mem_alias) + assert expected_mls_str in stdout + assert expected_mls_str2 in stdout + + # verify pgms remain executable + pgm_output_map = { + (dest_lib, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem_alias, COBOL_PRINT_STR), + (dest_lib, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem_alias, COBOL_PRINT_STR2) + } 
+ + for steplib, pgm, output in pgm_output_map: + validate_loadlib_pgm(hosts, steplib=steplib, pgm_name=pgm, expected_output_str=output) + finally: - hosts.all.zos_data_set(name=dest, state="absent") - hosts.all.zos_data_set(name=src, state="absent") - hosts.all.zos_data_set(name=cobol_pds, state="absent") - hosts.all.file(name=uss_dest, state="absent") + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + hosts.all.file(path=uss_dir_path, state="absent") @pytest.mark.uss @@ -3001,6 +3573,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse +@pytest.mark.aliases @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module @@ -3020,6 +3593,14 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts.all.file(path=dest_path, state="directory") + # ensure aliases:True errors out for non-text member copy + copy_aliases_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, aliases=True) + for result in copy_aliases_res.contacted.values(): + error_msg = "Alias support for text-based data sets is not available" + assert result.get("failed") is True + assert result.get("changed") is False + assert error_msg in result.get("msg") + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest_path) @@ -3037,6 +3618,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse +@pytest.mark.aliases @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module @@ -3052,6 +3634,14 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): executable=SHELL_EXECUTABLE ) + # 
ensure aliases:True errors out for non-text member copy + copy_aliases_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, aliases=True) + for result in copy_aliases_res.contacted.values(): + error_msg = "Alias support for text-based data sets is not available" + assert result.get("failed") is True + assert result.get("changed") is False + assert error_msg in result.get("msg") + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest_path) verify_copy = hosts.all.shell( diff --git a/tests/pytest.ini b/tests/pytest.ini index a9324aaae..4226de838 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -8,4 +8,6 @@ markers = seq: sequential data sets test cases. pdse: partitioned data sets test cases. vsam: VSAM data sets test cases. - template: Jinja2 templating test cases. \ No newline at end of file + template: Jinja2 templating test cases. + aliases: aliases option test cases. + loadlib: executable copy test cases. \ No newline at end of file From bad20a85eb0f5cd805eed4f1398daf7f0a380446 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 13 Oct 2023 22:01:29 -0700 Subject: [PATCH 202/413] New module zos_script (#961) * First version of the action plugin and module doc * Added remote execution * Fixed handling and cleanup of temp files * Fixed mode setting for scripts * Fixed undefined variable error in action plugin * Fixed chdir when running command * Added creates and removes options * Changed encoding validation * Filled out docs for the module * Added examples * Filled out RETURN block for module * Enhanced error messages * Generated docs for zos_script * Added first tests for zos_script * Reordered args * Added mode check to remote script test * Fixed template rendering * Fixed tests * Added tests for error handling and templates * Fixed a sanity error when returning a failure JSON * Updated ignore files * Updated module docs * Updated repository templates for issues * 
Fixed whitespace in docs * Updated tmp_path description * Updated notes in documentation * Removed use of local_charset * Removed private args This commit finishes the work needed to remove two sanity tests exceptions. * Fixed permissions for remote scripts * Updated module documentation * Updated documentation for tmp_path --- .github/ISSUE_TEMPLATE/bug_issue.yml | 1 + .../ISSUE_TEMPLATE/collaboration_issue.yml | 1 + .github/ISSUE_TEMPLATE/doc_issue.yml | 1 + .github/ISSUE_TEMPLATE/enabler_issue.yml | 1 + .../enhancement_feature.issue.yml | 1 + docs/source/modules/zos_script.rst | 391 +++++++++++++++ plugins/action/zos_script.py | 161 ++++++ plugins/modules/zos_script.py | 397 +++++++++++++++ .../modules/test_zos_script_func.py | 458 ++++++++++++++++++ tests/sanity/ignore-2.10.txt | 3 + tests/sanity/ignore-2.11.txt | 3 + tests/sanity/ignore-2.12.txt | 3 + tests/sanity/ignore-2.13.txt | 1 + tests/sanity/ignore-2.14.txt | 1 + tests/sanity/ignore-2.15.txt | 1 + tests/sanity/ignore-2.16.txt | 1 + tests/sanity/ignore-2.9.txt | 3 + 17 files changed, 1428 insertions(+) create mode 100644 docs/source/modules/zos_script.rst create mode 100644 plugins/action/zos_script.py create mode 100644 plugins/modules/zos_script.py create mode 100644 tests/functional/modules/test_zos_script_func.py diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index d50883065..e03266e7b 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -127,6 +127,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index c9ac9f151..f601ce1e1 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -144,6 +144,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - 
zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 5583ce5c1..38a8f1818 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -75,6 +75,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index abc9f16c2..d520148dc 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -40,6 +40,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index 3e1763091..f190ee70c 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -39,6 +39,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command - zos_unarchive validations: diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst new file mode 100644 index 000000000..6fc9a0ece --- /dev/null +++ b/docs/source/modules/zos_script.rst @@ -0,0 +1,391 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_script.py + +.. _zos_script_module: + + +zos_script -- Run scripts in z/OS +================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- The `zos_script <./zos_script.html>`_ module runs a local or remote script in the remote machine. + + + + + +Parameters +---------- + + +chdir + Change the script's working directory to this path. + + When not specified, the script will run in the user's home directory on the remote machine. 
+ + | **required**: False + | **type**: str + + +cmd + Path to the local or remote script followed by optional arguments. + + If the script path contains spaces, make sure to enclose it in two pairs of quotes. + + Arguments may need to be escaped so the shell in the remote machine handles them correctly. + + | **required**: True + | **type**: str + + +creates + Path to a file in the remote machine. If it exists, the script will not be executed. + + | **required**: False + | **type**: str + + +encoding + Specifies which encodings the script should be converted from and to. + + If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. + + | **required**: False + | **type**: dict + + + from + The encoding to be converted from. + + | **required**: True + | **type**: str + + + to + The encoding to be converted to. + + | **required**: True + | **type**: str + + + +executable + Path of an executable in the remote machine to invoke the script with. + + When not specified, the system will assume the script is interpreted REXX and try to run it as such. Make sure to include a comment identifying the script as REXX at the start of the file in this case. + + | **required**: False + | **type**: str + + +remote_src + If set to ``false``, the module will search the script in the controller. + + If set to ``true``, the module will search the script in the remote machine. + + | **required**: False + | **type**: bool + + +removes + Path to a file in the remote machine. If it does not exist, the script will not be executed. + + | **required**: False + | **type**: str + + +tmp_path + Path in the remote machine where local scripts will be temporarily copied to. + + When not specified, the module will copy local scripts to the default temporary path for the user. + + If ``tmp_path`` does not exist in the remote machine, the module will not create it. 
+ + | **required**: False + | **type**: str + + +use_template + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + + Only valid when ``src`` is a local file or directory. + + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + + | **required**: False + | **type**: bool + + +template_parameters + Options to set the way Jinja2 will process templates. + + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + + These options are ignored unless ``use_template`` is true. + + | **required**: False + | **type**: dict + + + variable_start_string + Marker for the beginning of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: {{ + + + variable_end_string + Marker for the end of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: }} + + + block_start_string + Marker for the beginning of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: {% + + + block_end_string + Marker for the end of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: %} + + + comment_start_string + Marker for the beginning of a comment in Jinja2. 
+ + | **required**: False + | **type**: str + | **default**: {# + + + comment_end_string + Marker for the end of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: #} + + + line_statement_prefix + Prefix used by Jinja2 to identify line-based statements. + + | **required**: False + | **type**: str + + + line_comment_prefix + Prefix used by Jinja2 to identify comment lines. + + | **required**: False + | **type**: str + + + lstrip_blocks + Whether Jinja2 should strip leading spaces from the start of a line to a block. + + | **required**: False + | **type**: bool + + + trim_blocks + Whether Jinja2 should remove the first newline after a block is removed. + + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + + | **required**: False + | **type**: bool + | **default**: True + + + keep_trailing_newline + Whether Jinja2 should keep the first trailing newline at the end of a template after rendering. + + | **required**: False + | **type**: bool + + + newline_sequence + Sequence that starts a newline in a template. + + | **required**: False + | **type**: str + | **default**: \\n + | **choices**: \\n, \\r, \\r\\n + + + auto_reload + Whether to reload a template file when it has changed after the task has started. + + | **required**: False + | **type**: bool + + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Run a local REXX script on the managed z/OS node. + zos_script: + cmd: ./scripts/HELLO + + - name: Run a local REXX script with args on the managed z/OS node. + zos_script: + cmd: ./scripts/ARGS "1,2" + + - name: Run a remote REXX script while changing its working directory. + zos_script: + cmd: /u/user/scripts/ARGS "1,2" + remote_src: true + chdir: /u/user/output_dir + + - name: Run a local Python script that uses a custom tmp_path. 
+ zos_script: + cmd: ./scripts/program.py + executable: /usr/bin/python3 + tmp_path: /usr/tmp/ibm_zos_core + + - name: Run a local script made from a template. + zos_script: + cmd: ./templates/PROGRAM + use_template: true + + - name: Run a script only when a file is not present. + zos_script: + cmd: ./scripts/PROGRAM + creates: /u/user/pgm_result.txt + + - name: Run a script only when a file is already present on the remote machine. + zos_script: + cmd: ./scripts/PROGRAM + removes: /u/user/pgm_input.txt + + + + +Notes +----- + +.. note:: + When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. + + Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for the script will be restored by the module before the task ends. + + If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. + + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine. + + `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + + + +See Also +-------- + +.. 
seealso:: + + - :ref:`zos_copy_module` + - :ref:`zos_tso_command_module` + + + + +Return Values +------------- + + +cmd + Original command issued by the user. + + | **returned**: changed + | **type**: str + | **sample**: ./scripts/PROGRAM + +remote_cmd + Command executed on the remote machine. Will show the executable path used, and when running local scripts, will also show the temporary file used. + + | **returned**: changed + | **type**: str + | **sample**: /tmp/zos_script.jycqqfny.ARGS 1,2 + +msg + Failure or skip message returned by the module. + + | **returned**: failure or skipped + | **type**: str + | **sample**: File /u/user/file.txt is already missing on the system, skipping script + +rc + Return code of the script. + + | **returned**: changed + | **type**: int + | **sample**: 16 + +stdout + The STDOUT from the script, may be empty. + + | **returned**: changed + | **type**: str + | **sample**: Allocation to SYSEXEC completed. + +stderr + The STDERR from the script, may be empty. + + | **returned**: changed + | **type**: str + | **sample**: An error has ocurred. + +stdout_lines + List of strings containing individual lines from STDOUT. + + | **returned**: changed + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "Allocation to SYSEXEC completed." + ] + +stderr_lines + List of strings containing individual lines from STDERR. + + | **returned**: changed + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "An error has ocurred" + ] + diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py new file mode 100644 index 000000000..a17934ac4 --- /dev/null +++ b/plugins/action/zos_script.py @@ -0,0 +1,161 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import copy +import shlex +from os import path + +from ansible.plugins.action import ActionBase +from ansible.module_utils.parsing.convert_bool import boolean +from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule + +from ansible.utils.display import Display +display = Display() + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + if task_vars is None: + task_vars = dict() + + result = super(ActionModule, self).run(tmp, task_vars) + if result.get("skipped"): + return result + + module_args = self._task.args.copy() + + # First separating the command into the script path and its args + # if they are present. + cmd_parts = shlex.split(module_args.get('cmd')) + if len(cmd_parts) == 0: + result.update(dict( + changed=False, + failed=True, + invocation=dict(module_args=self._task.args), + msg="The command could not be validated, please check that it conforms to shell syntax." + )) + return result + + script_path = cmd_parts[0] + script_args = cmd_parts[1] if len(cmd_parts) > 1 else "" + remote_src = self._process_boolean(module_args.get('remote_src')) + user_cmd = tempfile_path = None + + # Copying the script when it's a local file. + if not remote_src: + script_path = path.abspath(path.normpath(script_path)) + script_name = path.basename(script_path) + tmp_path = module_args.get('tmp_path') + + # Getting a temporary path for the script. 
+ tempfile_args = dict( + state="file", + path=tmp_path, + prefix="zos_script.", + suffix=".{0}".format(script_name) + ) + + tempfile_result = self._execute_module( + module_name="ansible.builtin.tempfile", + module_args=tempfile_args, + task_vars=task_vars + ) + result.update(tempfile_result) + + if not result.get("changed") or result.get("failed"): + result.update(dict( + changed=False, + failed=True, + invocation=dict( + module_args=self._task.args, + tempfile_args=tempfile_result.get('invocation', dict()).get('module_args') + ), + msg="An error ocurred while trying to create a tempfile for the script." + )) + return result + + tempfile_path = tempfile_result.get('path') + + # Letting zos_copy handle the transfer of the script. + zos_copy_args = dict( + src=script_path, + dest=tempfile_path, + force=True, + is_binary=False, + encoding=module_args.get('encoding'), + use_template=module_args.get('use_template', False), + template_parameters=module_args.get('template_parameters', dict()) + ) + copy_task = copy.deepcopy(self._task) + copy_task.args = zos_copy_args + zos_copy_action_plugin = ZosCopyActionModule( + task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj + ) + + zos_copy_result = zos_copy_action_plugin.run(task_vars=task_vars) + result.update(zos_copy_result) + + if not result.get("changed") or result.get("failed"): + result.update(dict( + changed=False, + failed=True, + invocation=dict( + module_args=self._task.args, + tempfile_args=tempfile_result.get('invocation', dict()).get('module_args'), + zos_copy_args=zos_copy_result.get('invocation', dict()).get('module_args') + ), + msg="An error ocurred while trying to copy the script to the managed node: {0}.".format( + zos_copy_result.get('msg') + ) + )) + return result + + # We're going to shadow the command supplied by the user with the remote + # tempfile we just created. 
+ user_cmd = module_args.get('cmd') + module_args['cmd'] = '{0} {1}'.format(tempfile_path, script_args) + + module_result = self._execute_module( + module_name='ibm.ibm_zos_core.zos_script', + module_args=module_args, + task_vars=task_vars + ) + + result = module_result + if result.get('changed') and tempfile_path: + result['tempfile_path'] = tempfile_path + # The cmd field will return using the tempfile created, so we + # restore it to what the user supplied. + result['cmd'] = user_cmd + + if not remote_src: + self._remote_cleanup(tempfile_path) + + return result + + def _remote_cleanup(self, tempfile_path): + """Removes the temporary file in a managed node created for a local + script.""" + self._connection.exec_command("rm -f {0}".format(tempfile_path)) + + def _process_boolean(self, arg, default=False): + try: + return boolean(arg) + except TypeError: + return default diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py new file mode 100644 index 000000000..15699c4a1 --- /dev/null +++ b/plugins/modules/zos_script.py @@ -0,0 +1,397 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +DOCUMENTATION = r""" +--- +module: zos_script +version_added: '1.8.0' +author: + - "Ivan Moreno (@rexemin)" +short_description: Run scripts in z/OS +description: + - The L(zos_script,./zos_script.html) module runs a local or remote script + in the remote machine. + +options: + chdir: + description: + - Change the script's working directory to this path. + - When not specified, the script will run in the user's + home directory on the remote machine. + type: str + required: false + cmd: + description: + - Path to the local or remote script followed by optional arguments. + - If the script path contains spaces, make sure to enclose it in two + pairs of quotes. + - Arguments may need to be escaped so the shell in the remote machine + handles them correctly. + type: str + required: true + creates: + description: + - Path to a file in the remote machine. If it exists, the + script will not be executed. + type: str + required: false + encoding: + description: + - Specifies which encodings the script should be converted from and to. + - If C(encoding) is not provided, the module determines which local + and remote charsets to convert the data from and to. + type: dict + required: false + suboptions: + from: + description: + - The encoding to be converted from. + required: true + type: str + to: + description: + - The encoding to be converted to. + required: true + type: str + executable: + description: + - Path of an executable in the remote machine to invoke the + script with. + - When not specified, the system will assume the script is + interpreted REXX and try to run it as such. Make sure to + include a comment identifying the script as REXX at the + start of the file in this case. + type: str + required: false + remote_src: + description: + - If set to C(false), the module will search the script in the + controller. 
+ - If set to C(true), the module will search the script in the + remote machine. + type: bool + required: false + removes: + description: + - Path to a file in the remote machine. If it does not exist, the + script will not be executed. + type: str + required: false + tmp_path: + description: + - Directory path in the remote machine where local scripts will be + temporarily copied to. + - When not specified, the module will copy local scripts to + the default temporary path for the user. + - If C(tmp_path) does not exist in the remote machine, the + module will not create it. + - All scripts copied to C(tmp_path) will be removed from the managed + node before the module finishes executing. + type: str + required: false + +extends_documentation_fragment: + - ibm.ibm_zos_core.template + +notes: + - When executing local scripts, temporary storage will be used + on the remote z/OS system. The size of the temporary storage will + correspond to the size of the file being copied. + - Execution permissions for the group assigned to the script will be + added to remote scripts. The original permissions for remote scripts will + be restored by the module before the task ends. + - The module will only add execution permissions for the file owner. + - If executing REXX scripts, make sure to include a newline character on + each line of the file. Otherwise, the interpreter may fail and return + error C(BPXW0003I). + - For supported character sets used to encode data, refer to the + L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine. + - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) + for the underlying transfer protocol; Co:Z SFTP is not supported. In + the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from + using Co:Z thus falling back to using standard SFTP. 
+ - This module executes scripts inside z/OS UNIX System Services. For + running REXX scripts contained in data sets or CLISTs, consider issuing a TSO + command with L(zos_tso_command,./zos_tso_command.html). + - The community script module does not rely on Python to execute scripts on a + managed node, while this module does. Python must be present on the + remote machine. + +seealso: + - module: zos_copy + - module: zos_tso_command +""" + +EXAMPLES = r""" +- name: Run a local REXX script on the managed z/OS node. + zos_script: + cmd: ./scripts/HELLO + +- name: Run a local REXX script with args on the managed z/OS node. + zos_script: + cmd: ./scripts/ARGS "1,2" + +- name: Run a remote REXX script while changing its working directory. + zos_script: + cmd: /u/user/scripts/ARGS "1,2" + remote_src: true + chdir: /u/user/output_dir + +- name: Run a local Python script that uses a custom tmp_path. + zos_script: + cmd: ./scripts/program.py + executable: /usr/bin/python3 + tmp_path: /usr/tmp/ibm_zos_core + +- name: Run a local script made from a template. + zos_script: + cmd: ./templates/PROGRAM + use_template: true + +- name: Run a script only when a file is not present. + zos_script: + cmd: ./scripts/PROGRAM + creates: /u/user/pgm_result.txt + +- name: Run a script only when a file is already present on the remote machine. + zos_script: + cmd: ./scripts/PROGRAM + removes: /u/user/pgm_input.txt +""" + +RETURN = r""" +cmd: + description: Original command issued by the user. + returned: changed + type: str + sample: ./scripts/PROGRAM +remote_cmd: + description: + Command executed on the remote machine. Will show the executable + path used, and when running local scripts, will also show the + temporary file used. + returned: changed + type: str + sample: /tmp/zos_script.jycqqfny.ARGS 1,2 +msg: + description: Failure or skip message returned by the module. 
+ returned: failure or skipped + type: str + sample: + File /u/user/file.txt is already missing on the system, skipping script +rc: + description: Return code of the script. + returned: changed + type: int + sample: 16 +stdout: + description: The STDOUT from the script, may be empty. + returned: changed + type: str + sample: Allocation to SYSEXEC completed. +stderr: + description: The STDERR from the script, may be empty. + returned: changed + type: str + sample: An error has ocurred. +stdout_lines: + description: List of strings containing individual lines from STDOUT. + returned: changed + type: list + sample: ["Allocation to SYSEXEC completed."] +stderr_lines: + description: List of strings containing individual lines from STDERR. + returned: changed + type: list + sample: ["An error has ocurred"] +""" + + +import os +import stat +import shlex + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) + + +def run_module(): + module = AnsibleModule( + argument_spec=dict( + chdir=dict(type='str', required=False), + cmd=dict(type='str', required=True), + creates=dict(type='str', required=False), + encoding=dict( + type='dict', + required=False, + options={ + 'from': dict(type='str', required=True,), + 'to': dict(type='str', required=True,) + } + ), + executable=dict(type='str', required=False), + remote_src=dict(type='bool', required=False), + removes=dict(type='str', required=False), + tmp_path=dict(type='str', required=False), + use_template=dict(type='bool', default=False), + template_parameters=dict( + type='dict', + required=False, + options=dict( + variable_start_string=dict(type='str', default='{{'), + variable_end_string=dict(type='str', default='}}'), + block_start_string=dict(type='str', default='{%'), + block_end_string=dict(type='str', default='%}'), + comment_start_string=dict(type='str', default='{#'), + comment_end_string=dict(type='str', default='#}'), + 
line_statement_prefix=dict(type='str', required=False), + line_comment_prefix=dict(type='str', required=False), + lstrip_blocks=dict(type='bool', default=False), + trim_blocks=dict(type='bool', default=True), + keep_trailing_newline=dict(type='bool', default=False), + newline_sequence=dict( + type='str', + default='\n', + choices=['\n', '\r', '\r\n'] + ), + auto_reload=dict(type='bool', default=False), + ) + ), + ), + supports_check_mode=False + ) + + args_def = dict( + chdir=dict(arg_type='path', required=False), + cmd=dict(arg_type='str', required=True), + creates=dict(arg_type='path', required=False), + executable=dict(arg_type='path', required=False), + remote_src=dict(arg_type='bool', required=False), + removes=dict(arg_type='path', required=False), + tmp_path=dict(arg_type='path', required=False), + use_template=dict(arg_type='bool', required=False), + template_parameters=dict( + arg_type='dict', + required=False, + options=dict( + variable_start_string=dict(arg_type='str', required=False), + variable_end_string=dict(arg_type='str', required=False), + block_start_string=dict(arg_type='str', required=False), + block_end_string=dict(arg_type='str', required=False), + comment_start_string=dict(arg_type='str', required=False), + comment_end_string=dict(arg_type='str', required=False), + line_statement_prefix=dict(arg_type='str', required=False), + line_comment_prefix=dict(arg_type='str', required=False), + lstrip_blocks=dict(arg_type='bool', required=False), + trim_blocks=dict(arg_type='bool', required=False), + keep_trailing_newline=dict(arg_type='bool', required=False), + newline_sequence=dict(arg_type='str', required=False), + auto_reload=dict(arg_type='bool', required=False), + ) + ), + ) + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + + cmd_str = 
module.params.get('cmd') + cmd_parts = shlex.split(cmd_str) + script_path = cmd_parts[0] + chdir = module.params.get('chdir') + executable = module.params.get('executable') + creates = module.params.get('creates') + removes = module.params.get('removes') + + if creates and os.path.exists(creates): + result = dict( + changed=False, + skipped=True, + msg='File {0} already exists on the system, skipping script'.format(creates) + ) + module.exit_json(**result) + + if removes and not os.path.exists(removes): + result = dict( + changed=False, + skipped=True, + msg='File {0} is already missing on the system, skipping script'.format(removes) + ) + module.exit_json(**result) + + if chdir and not os.path.exists(chdir): + module.fail_json( + msg='The given chdir {0} does not exist on the system.'.format(chdir) + ) + + # Adding owner execute permissions to the script. + # The module will fail if the Ansible user is not the owner! + script_permissions = os.lstat(script_path).st_mode + os.chmod( + script_path, + script_permissions | stat.S_IXUSR + ) + + if executable: + cmd_str = "{0} {1}".format(executable, cmd_str) + + cmd_str = cmd_str.strip() + script_rc, stdout, stderr = module.run_command( + cmd_str, + cwd=chdir + ) + + result = dict( + changed=True, + cmd=module.params.get('cmd'), + remote_cmd=cmd_str, + rc=script_rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) + + # Reverting script's permissions. 
+ os.chmod(script_path, script_permissions) + + if script_rc != 0 or stderr: + result['msg'] = 'The script terminated with an error' + module.fail_json( + **result + ) + + module.exit_json(**result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py new file mode 100644 index 000000000..2bdae2a66 --- /dev/null +++ b/tests/functional/modules/test_zos_script_func.py @@ -0,0 +1,458 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function + +import pytest +__metaclass__ = type + + +# Using || to concatenate strings without extra spaces. +rexx_script_args = """/* REXX */ +parse arg A ',' B +say 'args are ' || A || ',' || B +return 0 + +""" + +# For validating that chdir gets honored by the module. +rexx_script_chdir = """/* REXX */ +address syscall 'getcwd cwd' +say cwd +return 0 + +""" + +# For testing a default template. Note that the Jinja variable is static +# and it's always called playbook_msg. +rexx_script_template_default = """/* REXX */ +say '{{ playbook_msg }}' +return 0 + +""" + +# For testing templates with custom markers. Here the markers are static +# too (always '((', '))', '&$' and '$&'). 
+rexx_script_template_custom = """/* REXX */ +&$ This is a comment that should create problems if not substituted $& +say '(( playbook_msg ))' +return 0 + +""" + + +def create_script_content(msg, script_type): + """Returns a string containing either a valid REXX script or a valid + Python script. The script will print the given message.""" + if script_type == 'rexx': + # Without the comment in the first line, the interpreter will not be + # able to run the script. + # Without the last blank line, the REXX interpreter will throw + # an error. + return """/* REXX */ +say '{0}' +return 0 + +""".format(msg) + elif script_type == 'python': + return """msg = "{0}" +print(msg) +""".format(msg) + else: + raise Exception('Type {0} is not valid.'.format(script_type)) + + +def create_python_script_stderr(msg, rc): + """Returns a Python script that will write out to STDERR and return + a given RC. The RC can be 0, but for testing it would be better if it + was something else.""" + return """import sys +print('{0}', file=sys.stderr) +exit({1}) +""".format(msg, rc) + + +def create_local_file(content, suffix): + """Creates a tempfile that has the given content.""" + import os + import tempfile + + fd, file_path = tempfile.mkstemp( + prefix='zos_script', + suffix=suffix + ) + os.close(fd) + + with open(file_path, 'w') as f: + f.write(content) + + return file_path + + +def test_rexx_script_without_args(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + zos_script_result = hosts.all.zos_script( + cmd=script_path + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == msg + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + 
os.remove(script_path) + + +def test_rexx_remote_script(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + local_script = create_local_file(rexx_script, 'rexx') + + # Using zos_copy instead of doing an echo with shell to avoid trouble + # with how single quotes are handled. + script_path = '/tmp/zos_script_test_script' + copy_result = hosts.all.zos_copy( + src=local_script, + dest=script_path, + mode='600' + ) + for result in copy_result.contacted.values(): + assert result.get('changed') is True + + pre_stat_info = hosts.all.stat(path=script_path) + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + remote_src=True + ) + + post_stat_info = hosts.all.stat(path=script_path) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == msg + assert result.get('stderr', '') == '' + # Checking that permissions remained unchanged after executing + # zos_script. 
+ for pre_stat, post_stat in zip( + pre_stat_info.contacted.values(), + post_stat_info.contacted.values() + ): + assert pre_stat.get('mode') == post_stat.get('mode') + finally: + if os.path.exists(local_script): + os.remove(local_script) + hosts.all.file(path=script_path, state='absent') + + +def test_rexx_script_with_args(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_args + script_path = create_local_file(rexx_script, 'rexx') + + args = '1,2' + cmd = "{0} '{1}'".format(script_path, args) + + zos_script_result = hosts.all.zos_script( + cmd=cmd + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == 'args are {0}'.format(args) + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_rexx_script_chdir(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_chdir + script_path = create_local_file(rexx_script, 'rexx') + + tmp_remote_dir = '/zos_script_tests' + file_result = hosts.all.file( + path=tmp_remote_dir, + state='directory' + ) + + for result in file_result.contacted.values(): + assert result.get('changed') is True + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + chdir=tmp_remote_dir + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == tmp_remote_dir + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + hosts.all.file(path=tmp_remote_dir, state='absent') + + +def test_rexx_script_tmp_path(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = 
create_script_content('tmp_path test', 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + tmp_remote_dir = '/tmp/zos_script_tests' + file_result = hosts.all.file( + path=tmp_remote_dir, + state='directory' + ) + + for result in file_result.contacted.values(): + assert result.get('changed') is True + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + tmp_path=tmp_remote_dir + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stderr', '') == '' + assert tmp_remote_dir in result.get('remote_cmd', '') + finally: + if os.path.exists(script_path): + os.remove(script_path) + hosts.all.file(path=tmp_remote_dir, state='absent') + + +def test_python_script(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = "Success" + python_script = create_script_content(msg, 'python') + script_path = create_local_file(python_script, 'python') + + python_executable = hosts['options']['ansible_python_path'] + zos_script_result = hosts.all.zos_script( + cmd=script_path, + executable=python_executable + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == msg + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_rexx_script_creates_option(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + remote_file = '/tmp/zos_script_test_creates.txt' + file_result = hosts.all.file( + path=remote_file, + state='touch' + ) + + for result in file_result.contacted.values(): + assert result.get('changed') is True + + 
zos_script_result = hosts.all.zos_script( + cmd=script_path, + creates=remote_file + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is False + assert result.get('skipped') is True + assert result.get('failed', False) is False + finally: + if os.path.exists(script_path): + os.remove(script_path) + hosts.all.file(path=remote_file, state='absent') + + +def test_rexx_script_removes_option(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + # Not actually creating this file on the remote hosts. + remote_file = '/tmp/zos_script_test_removes.txt' + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + removes=remote_file + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is False + assert result.get('skipped') is True + assert result.get('failed', False) is False + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_script_template_with_default_markers(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_template_default + script_path = create_local_file(rexx_script, 'rexx') + + # Updating the vars available to the tasks. 
+ template_vars = dict( + playbook_msg='Success' + ) + for host in hosts['options']['inventory_manager']._inventory.hosts.values(): + host.vars.update(template_vars) + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + use_template=True + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == template_vars['playbook_msg'] + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_script_template_with_custom_markers(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_template_custom + script_path = create_local_file(rexx_script, 'rexx') + + # Updating the vars available to the tasks. + template_vars = dict( + playbook_msg='Success' + ) + for host in hosts['options']['inventory_manager']._inventory.hosts.values(): + host.vars.update(template_vars) + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + use_template=True, + template_parameters=dict( + variable_start_string='((', + variable_end_string='))', + comment_start_string='&$', + comment_end_string='$&', + ) + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == template_vars['playbook_msg'] + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_python_script_with_stderr(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Error' + rc = 1 + python_script = create_python_script_stderr(msg, rc) + script_path = create_local_file(python_script, 'python') + + python_executable = hosts['options']['ansible_python_path'] + zos_script_result = 
hosts.all.zos_script( + cmd=script_path, + executable=python_executable + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed') is True + assert result.get('rc') == rc + assert result.get('stdout', '') == '' + assert result.get('stderr', '').strip() == msg + finally: + if os.path.exists(script_path): + os.remove(script_path) diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 74db3a282..8778d80f9 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -74,6 +74,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 420528c74..9ceaf3c97 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -74,6 +74,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 
2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index 420528c74..9ceaf3c97 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -74,6 +74,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index a496e3ac8..70d4764e1 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 
plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index a496e3ac8..70d4764e1 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index a496e3ac8..70d4764e1 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx 
validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 8b4540038..a4835475f 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index 4a6c8a778..992ec6099 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -73,6 +73,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py 
validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported From d41f438e52fea1be692e4413efed65b1d074b53d Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 10:58:31 -0400 Subject: [PATCH 203/413] main change to version checker for consolidation --- plugins/module_utils/job.py | 11 ++++--- plugins/module_utils/zoau_version_checker.py | 31 +++++++++++++------- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 3a9c3b35e..390405d33 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -31,10 +31,9 @@ list_dds = MissingZOAUImport() listing = MissingZOAUImport() -try: - from zoautil_py import ZOAU_API_VERSION -except Exception: - ZOAU_API_VERSION = "1.2.0" +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, timeout=0, start_time=timer()): @@ -255,7 +254,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # this section only works on zoau 1.2.3/+ vvv - if ZOAU_API_VERSION > "1.2.2": + if zoau_version_checker.is_zoau_version_higher_than("1.2.2"): job["job_class"] = entry.job_class job["svc_class"] = entry.svc_class 
job["priority"] = entry.priority @@ -263,7 +262,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["creation_date"] = str(entry.creation_datetime)[0:10] job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position - if ZOAU_API_VERSION >= "1.2.4": + if zoau_version_checker.is_zoau_version_higher_than("1.2.3"): job["program_name"] = entry.program_name # this section only works on zoau 1.2.3/+ ^^^ diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index c88dac481..601ab6f16 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -13,8 +13,12 @@ from __future__ import absolute_import, division, print_function -import subprocess +# import subprocess +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" __metaclass__ = type @@ -96,16 +100,23 @@ def get_zoau_version_str(): Returns: { [int, int, int] } -- ZOAU version found in format [#,#,#]. There is a provision for a 4th level eg "v1.2.0.1". 
+ """ - zoaversion_out = subprocess.run( - 'zoaversion', shell=True, capture_output=True, check=False - ) + # zoaversion_out = subprocess.run( + # 'zoaversion', shell=True, capture_output=True, check=False + # ) + # version_list = ( + # zoaversion_out + # .stdout + # .decode('UTF-8') + # .strip() + # .split(' ')[3][1:] + # .split('.') + #) + zoaversion_out = ZOAU_API_VERSION.split('.') + version_list = ( - zoaversion_out - .stdout - .decode('UTF-8') - .strip() - .split(' ')[3][1:] - .split('.') + ZOAU_API_VERSION.split('.') ) + return version_list From e547d27cc8111c6a46dfdf5005ff09e03c88080c Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 11:12:20 -0400 Subject: [PATCH 204/413] Removed old version checker/shell call, added changelog entry --- .../1018-internal-consolidate-version-checks.yml | 9 +++++++++ plugins/module_utils/zoau_version_checker.py | 13 ------------- 2 files changed, 9 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1018-internal-consolidate-version-checks.yml diff --git a/changelogs/fragments/1018-internal-consolidate-version-checks.yml b/changelogs/fragments/1018-internal-consolidate-version-checks.yml new file mode 100644 index 000000000..3698ed510 --- /dev/null +++ b/changelogs/fragments/1018-internal-consolidate-version-checks.yml @@ -0,0 +1,9 @@ +trivial: +- zoau_version_check - Change shell call to include call, for higher responsivity. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1027) +- zos_operator - Use new version check + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) +- zos_operator_action_query - Use new version check + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) +- utils/job.py - Use new version check + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 601ab6f16..41dd35276 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -102,19 +102,6 @@ def get_zoau_version_str(): provision for a 4th level eg "v1.2.0.1". """ - # zoaversion_out = subprocess.run( - # 'zoaversion', shell=True, capture_output=True, check=False - # ) - # version_list = ( - # zoaversion_out - # .stdout - # .decode('UTF-8') - # .strip() - # .split(' ')[3][1:] - # .split('.') - #) - zoaversion_out = ZOAU_API_VERSION.split('.') - version_list = ( ZOAU_API_VERSION.split('.') ) From c3d9bd12137a42daa7aebd354a44e46796ae4965 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 15:00:37 -0400 Subject: [PATCH 205/413] Changed location of duration value set, so it will always be populated. 
--- plugins/module_utils/job.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 390405d33..30cb47bb9 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -280,6 +280,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T sleep(1) list_of_dds = list_dds(entry.id) + job["duration"] = duration + for single_dd in list_of_dds: dd = {} @@ -354,11 +356,13 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["ret_code"]["msg"] = tmptext.strip() job["ret_code"]["msg_code"] = None job["ret_code"]["code"] = None - if len(list_of_dds) > 0: + + # if len(list_of_dds) > 0: # The duration should really only be returned for job submit but the code # is used job_output as well, for now we can ignore this point unless # we want to offer a wait_time_s for job output which might be reasonable. - job["duration"] = duration + # Note: Moved this to the upper time loop, so it should always be populated. + # job["duration"] = duration final_entries.append(job) if not final_entries: From faa129ed25ec938ceb86e7fb04fe1cb5d0ba233a Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 15:31:55 -0400 Subject: [PATCH 206/413] found another edge case where duration was not being returned. 
--- plugins/module_utils/job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 30cb47bb9..cfe8c4a67 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -271,6 +271,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["content_type"] = "" job["ret_code"]["steps"] = [] job["ddnames"] = [] + job["duration"] = duration if dd_scan: list_of_dds = list_dds(entry.id) From 08fbae69b8b7d2cd7e9c9e988b3d24808d6d99c7 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 16:08:58 -0400 Subject: [PATCH 207/413] added tolerance to test on max_rc, where duration is not always returned. --- tests/functional/modules/test_zos_job_submit_func.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index b93b448c7..44dfdbf01 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -529,7 +529,12 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration') >= args["wait_time_s"]: + if result.get('duration'): + duration = result.get('duration') + else: + duration = 0 + + if duration >= args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) else: assert re.search(r'non-zero', repr(result.get("msg"))) From 1de66d5895297540fe21da252736023ecba51dd1 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 16 Oct 2023 16:06:42 -0700 Subject: [PATCH 208/413] Staging v1.7.0 merge to main (#1019) (#1023) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * Galaxy 1.7 updates * Update meta runtime to support ansible-core 2.14 or later * Update ibm_zos_core_meta.yml with updated version * Update readme to align to supported ansible versions and new urls * Added additional sanity ignore files to the exclude list * Added additional sanity ignore files to the exclude list for ansible-lint. * Update copyright yrs for source files that were overlooked * Remove requirements from module doc, rely on offerings minimum requirements, also zoau 1.2.1 never was supported * Add changelog summary for 1.7 * Adding generated antsibull-changelog release changelog and artifacts * Remove v1.7.0_summary, its no longer needed * Update release notes for ac 1.7.0 * Remove unsupported collection versions requiring a version of zoau that is EOS --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .ansible-lint | 4 + CHANGELOG.rst | 36 +--- README.md | 6 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 14 ++ docs/source/modules/zos_gather_facts.rst | 5 - docs/source/release_notes.rst | 202 +++++------------------ galaxy.yml | 6 +- meta/ibm_zos_core_meta.yml | 2 +- meta/runtime.yml | 2 +- plugins/action/zos_copy.py | 2 +- plugins/modules/zos_apf.py | 2 +- plugins/modules/zos_encode.py | 2 +- plugins/modules/zos_gather_facts.py | 2 - 14 files changed, 78 insertions(+), 209 deletions(-) diff --git a/.ansible-lint b/.ansible-lint index ac0ca0b26..7325803a2 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -25,7 +25,11 @@ exclude_paths: - tests/helpers - tests/requirements.txt - tests/unit + - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt + - tests/sanity/ignore-2.12.txt + - tests/sanity/ignore-2.13.txt - venv* parseable: true quiet: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c6b3a91e0..a7c787d05 100644 --- a/CHANGELOG.rst +++ 
b/CHANGELOG.rst @@ -5,38 +5,13 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.7.0-beta.2 -============= - -Release Summary ---------------- - -Release Date: '2023-08-21' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Minor Changes -------------- - -- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - -Bugfixes --------- - -- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - -v1.7.0-beta.1 -============= +v1.7.0 +====== Release Summary --------------- -Release Date: '2023-07-26' +Release Date: '2023-10-09' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. 
For additional details such as required dependencies and availability review @@ -51,6 +26,8 @@ Minor Changes ------------- - Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) +- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) - zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) - zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) @@ -58,11 +35,14 @@ Minor Changes - zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) - zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. 
(https://github.com/ansible-collections/ibm_zos_core/pull/841) - zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). Bugfixes -------- - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) - zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) - zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). diff --git a/README.md b/README.md index 5cbd6fd98..13f45889f 100644 --- a/README.md +++ b/README.md @@ -50,14 +50,14 @@ and ansible-doc to automate tasks on z/OS. 
Ansible version compatibility ============================= -This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.16. -The Ansible and Ansible Core versions supported for this collection align to the +This collection has been tested against **Ansible Core** versions >=2.14. +The Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** and **ansible-core**. For **Ansible Automation Platform** (AAP) users, review the -[Ansible Automation Platform Certified Content](https://access.redhat.com/articles/3642632) +[Ansible Automation Platform Certified Content](https://access.redhat.com/support/articles/ansible-automation-platform-certified-content) and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) for more more information on supported versions of Ansible. diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index c07ea8e62..810d65965 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -126,4 +126,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.7.0-beta.2 +version: 1.7.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 753c8e318..6988760f9 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -875,6 +875,20 @@ releases: name: zos_volume_init namespace: '' release_date: '2023-04-26' + 1.7.0: + changes: + release_summary: 'Release Date: ''2023-10-09'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - v1.7.0_summary.yml + release_date: '2023-10-09' 1.7.0-beta.1: changes: bugfixes: diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 232cc26ba..63bd22701 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -22,11 +22,6 @@ Synopsis - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. -Requirements ------------- - -- ZOAU 1.2.1 or later. - diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 9a7bdb059..de1a27013 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,47 +6,14 @@ Releases ======== -Version 1.7.0-beta.2 -==================== - -Minor Changes -------------- -- ``zos_archive`` - - - When xmit faces a space error in xmit operation because of dest or log data set being filled raises an appropriate error hint. - - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. - -- ``zos_unarchive`` - - - When copying to remote fails now a proper error message is displayed. - - When copying to remote if space_primary is not defined, then is defaulted to 5M. - -Bugfixes --------- -- ``zos_archive`` - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. 
- -Availability ------------- - -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS V2R3`_ or later -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. - -Version 1.7.0-beta.1 -==================== +Version 1.7.0 +============= New Modules ----------- -- ``zos_archive`` - archive files, data sets and extend archives on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. -- ``zos_unarchive`` - unarchive files and data sets in z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. +- ``zos_archive`` - archive files, data sets and extend archives on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. +- ``zos_unarchive`` - unarchive files and data sets on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. Major Changes ------------- @@ -60,36 +27,48 @@ Minor Changes - displays the data set attributes when the destination does not exist and was created by the module. - reverts the logic that would automatically create backups in the event of a module failure leaving it up to the user to decide if a backup is needed. - ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. -- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). +- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v1.2.4 or later). 
- ``zos_job_query`` + - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - removes unnecessary queries to find DDs improving the modules performance. -- ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). +- ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v1.2.4 or later). +- ``zos_archive`` + + - When XMIT encounters a space error because of the destination (dest) or log data set has reached capacity, the module raises an appropriate error message. + - When the destination (dest) data set space is not provided, then the module computes it using the source (src) given the pattern provided. + +- ``zos_unarchive`` + + - When copying to the z/OS managed node (remote_src) results in a failure, a proper error message is displayed + - When copying to the z/OS managed node (remote_src), if the option *primary_space* is not defined, then it is defaulted to 5M. Bugfixes -------- -- ``zos_data_set`` - fixes occasionally occurring orphaned VSAM cluster components such as INDEX when `present=absent`. -- ``zos_fetch`` - fixes the warning that appeared about the use of _play_context.verbosity. +- ``zos_data_set`` - fixes occasionally occurring orphaned VSAM cluster components such as INDEX when *present=absent*. +- ``zos_fetch`` - fixes the warning that appeared about the use of *_play_context.verbosity*. - ``zos_copy`` - - fixes the warning that appeared about the use of _play_context.verbosity. + - fixes the warning that appeared about the use of *_play_context.verbosity*. - fixes an issue where subdirectories would not be encoded. 
- fixes an issue where when mode was set, the mode was not applied to existing directories and files. - - displays a error message when copying into a data set that is being accessed by another process and no longer returns with `changed=true`. + - displays a error message when copying into a data set that is being accessed by another process and no longer returns with *changed=true*. -``zos_job_output`` - displays an appropriate error message for a job is not found in the spool. -``zos_operator`` - fixes the false reports that a command failed when keywords such as *error* were seen, the module now acts as a passthrough. +- ``zos_job_output`` - displays an appropriate error message for a job is not found in the spool. +- ``zos_operator`` - fixes the false reports that a command failed when keywords such as *error* were seen, the module now acts as a passthrough. +- ``zos_archive`` - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. @@ -152,7 +131,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. 
@@ -268,7 +247,7 @@ Availability Reference --------- -* Supported by `z/OS Version`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -307,7 +286,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -457,7 +436,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -558,7 +537,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 - `IBM Open Enterprise SDK for Python`_ v3.9.5 @@ -599,7 +578,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -645,7 +624,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -677,7 +656,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later 
* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -800,7 +779,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -822,115 +801,6 @@ Known issues "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. -Version 1.2.1 -============= - -Notes ------ - -* Update required -* Module changes - - * Noteworthy Python 2.x support - - * encode - removed TemporaryDirectory usage. - * zos_copy - fixed regex support, dictionary merge operation fix - * zos_fetch - fix quote import - -* Collection changes - - * Beginning this release, all sample playbooks previously included with the - collection will be made available on the `samples repository`_. The - `samples repository`_ explains the playbook concepts, - discusses z/OS administration, provides links to the samples support site, - blogs and other community resources. - -* Documentation changes - - * In this release, documentation related to playbook configuration has been - migrated to the `samples repository`_. Each sample contains a README that - explains what configurations must be made to run the sample playbook. - -.. 
_samples repository: - https://github.com/IBM/z_ansible_collections_samples/blob/main/README.md - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by IBM Open Enterprise Python for z/OS: 3.8.2 or later -* Supported by IBM Z Open Automation Utilities 1.0.3 PTF UI70435 -* Supported by z/OS V2R3 or later -* The z/OS® shell - -Version 1.1.0 -============= - -Notes ------ -* Update recommended -* New modules - - * zos_fetch - * zos_encode - * zos_operator_action_query - * zos_operator - * zos_tso_command - * zos_ping - -* New filter -* Improved error handling and messages -* Bug fixes -* Documentation updates -* New samples - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by IBM Open Enterprise Python for z/OS: 3.8.2 or later -* Supported by IBM Z Open Automation Utilities: 1.0.3 PTF UI70435 -* Supported by z/OS V2R3 -* The z/OS® shell - - -Version 1.0.0 -============= - -Notes ------ - -* Update recommended -* Security vulnerabilities fixed -* Improved test, security and injection coverage -* Module zos_data_set catalog support added -* Documentation updates - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by IBM Z Open Automation Utilities: 1.0.1 PTF UI66957 through - 1.0.3 PTF UI70435 - .. ............................................................................. .. Global Links .. ............................................................................. @@ -960,8 +830,12 @@ Reference https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm +.. _z/OS®: + https://www.ibm.com/docs/en/zos .. _z/OS V2R3: https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html +.. _z/OS V2R4: + https://www.ibm.com/docs/en/zos/2.4.0 .. 
_z/OS Version: https://www.ibm.com/docs/en/zos .. _FAQs: diff --git a/galaxy.yml b/galaxy.yml index b1090564c..f5c0ccf46 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.7.0-beta.2 +version: 1.7.0 # Collection README file readme: README.md @@ -91,5 +91,9 @@ build_ignore: - tests/helpers - tests/requirements.txt - tests/unit + - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt + - tests/sanity/ignore-2.12.txt + - tests/sanity/ignore-2.13.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index f659df786..51e4c7392 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.7.0-beta.2" +version: "1.7.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/meta/runtime.yml b/meta/runtime.yml index 576832bc7..be99ccf4b 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9.0' +requires_ansible: '>=2.14.0' diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 6b86d24a3..d65c4a468 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2021, 2022 +# Copyright (c) IBM Corporation 2019-2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index d3a945d1b..d0fec1ff5 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index a4a92a985..2628ab174 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index a3475be11..b7aeb7ee4 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -23,8 +23,6 @@ module: zos_gather_facts short_description: Gather z/OS system facts. version_added: '1.5.0' -requirements: - - ZOAU 1.2.1 or later. 
author: - "Ketan Kelkar (@ketankelkar)" description: From 312de14acb22af9303edfc9299cc5e9a6e9122db Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 16 Oct 2023 17:26:23 -0600 Subject: [PATCH 209/413] Enabler/validate path join part 2 (#1029) * Added real path fetch to base * Sec changes * Updated changelog and template * Added validation to zos_copy and zos_fetch Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Corrected positional argument * Added validation changes Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed f-string * Fixed path join for copy_to_file * Added validation function to template * Added new files * Added changelog fragment --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1029-validate-path-join.yml | 7 +++++++ plugins/module_utils/template.py | 20 ++++++++++++++----- plugins/modules/zos_archive.py | 14 ++++++++++--- plugins/modules/zos_unarchive.py | 9 +++++---- 4 files changed, 38 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1029-validate-path-join.yml diff --git a/changelogs/fragments/1029-validate-path-join.yml b/changelogs/fragments/1029-validate-path-join.yml new file mode 100644 index 000000000..234cf3148 --- /dev/null +++ b/changelogs/fragments/1029-validate-path-join.yml @@ -0,0 +1,7 @@ +minor_changes: + - zos_archive: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - module_utils/template: Add validation into path joins to detect unauthorized path traversals. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1029) \ No newline at end of file diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 407a231c6..419b997b2 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -238,7 +238,7 @@ def render_file_template(self, file_path, variables): )) try: - template_file_path = path.join(temp_template_dir, file_path) + template_file_path = path.join(validation.validate_safe_path(temp_template_dir), validation.validate_safe_path(file_path)) with open(template_file_path, mode="w", encoding=self.encoding) as template: template.write(rendered_contents) # There could be encoding errors. @@ -300,9 +300,16 @@ def render_dir_template(self, variables): for dirpath, subdirs, files in os.walk(self.template_dir): for template_file in files: - relative_dir = os.path.relpath(dirpath, self.template_dir) - file_path = os.path.normpath(os.path.join(relative_dir, template_file)) - + relative_dir = os.path.relpath( + validation.validate_safe_path(dirpath), + validation.validate_safe_path(self.template_dir) + ) + file_path = os.path.normpath( + os.path.join( + validation.validate_safe_path(relative_dir), + validation.validate_safe_path(template_file) + ) + ) try: template = self.templating_env.get_template(file_path) rendered_contents = template.render(variables) @@ -318,7 +325,10 @@ def render_dir_template(self, variables): )) try: - template_file_path = os.path.join(temp_template_dir, file_path) + template_file_path = os.path.join( + validation.validate_safe_path(temp_template_dir), + validation.validate_safe_path(file_path) + ) os.makedirs(os.path.dirname(template_file_path), exist_ok=True) with open(template_file_path, mode="w", encoding=self.encoding) as temp: temp.write(rendered_contents) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index c48fd767e..f5306bb25 100644 --- a/plugins/modules/zos_archive.py +++ 
b/plugins/modules/zos_archive.py @@ -420,7 +420,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, - mvs_cmd) + validation, + mvs_cmd, +) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( MissingZOAUImport, ) @@ -650,11 +652,17 @@ def archive_targets(self): if os.path.isdir(target): for directory_path, directory_names, file_names in os.walk(target, topdown=True): for directory_name in directory_names: - full_path = os.path.join(directory_path, directory_name) + full_path = os.path.join( + validation.validate_safe_path(directory_path), + validation.validate_safe_path(directory_name) + ) self.add(full_path, strip_prefix(self.arcroot, full_path)) for file_name in file_names: - full_path = os.path.join(directory_path, file_name) + full_path = os.path.join( + validation.validate_safe_path(directory_path), + validation.validate_safe_path(file_name) + ) self.add(full_path, strip_prefix(self.arcroot, full_path)) else: self.add(target, strip_prefix(self.arcroot, target)) diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 3f79fc789..be7c93f5c 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -386,6 +386,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, + validation, mvs_cmd) import re import os @@ -447,7 +448,7 @@ def update_permissions(self): Update permissions in unarchived files. 
""" for target in self.targets: - file_name = os.path.join(self.dest, target) + file_name = os.path.join(validation.validate_safe_path(self.dest), validation.validate_safe_path(target)) file_args = self.module.load_file_common_arguments(self.module.params, path=file_name) self.module.set_fs_attributes_if_different(file_args, self.changed) @@ -906,13 +907,13 @@ def tar_filter(member, dest_path): name = member.path.lstrip('/' + os.sep) if os.path.isabs(name): raise AbsolutePathError - target_path = os.path.realpath(os.path.join(dest_path, name)) + target_path = os.path.realpath(os.path.join(validation.validate_safe_path(dest_path), validation.validate_safe_path(name))) if os.path.commonpath([target_path, dest_path]) != dest_path: raise OutsideDestinationError(member, target_path) if member.islnk() or member.issym(): if os.path.isabs(member.linkname): raise AbsoluteLinkError(member) - target_path = os.path.realpath(os.path.join(dest_path, member.linkname)) + target_path = os.path.realpath(os.path.join(validation.validate_safe_path(dest_path), validation.validate_safe_path(member.linkname))) if os.path.commonpath([target_path, dest_path]) != dest_path: raise LinkOutsideDestinationError(member, target_path) @@ -923,7 +924,7 @@ def zip_filter(member, dest_path): name = name.lstrip('/' + os.sep) if os.path.isabs(name): raise AbsolutePathError - target_path = os.path.realpath(os.path.join(dest_path, name)) + target_path = os.path.realpath(os.path.join(validation.validate_safe_path(dest_path), validation.validate_safe_path(name))) if os.path.commonpath([target_path, dest_path]) != dest_path: raise OutsideDestinationError(member, target_path) From 36c2684847e0fa6fddb7228a66f2c81687a92c75 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 19 Oct 2023 23:49:43 -0700 Subject: [PATCH 210/413] Add known issues doc for utf8 issues (#1035) * Add known issues doc for utf8 issues Signed-off-by: ddimatos <dimatos@gmail.com> * Updated changelog fragement to remove 
colon usage Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected changelog errors Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1029-validate-path-join.yml | 10 ++++---- .../1034-document-utf8-known-issue.yml | 23 +++++++++++++++++++ ...-operator-response-come-back-truncate.yaml | 6 ++--- .../934-Remove-conditional-unnecessary.yml | 2 +- ...nhance-Add-wait-zos-operator-and-query.yml | 4 ++-- ...or-zos-copy-and-remove-temporary-files.yml | 4 ++-- ...os-job-submit-truncate-final-character.yml | 2 +- .../fragments/963-validate-path-join.yml | 4 ++-- .../fragments/965-enhance-archive-tests.yml | 4 ++-- 9 files changed, 41 insertions(+), 18 deletions(-) create mode 100644 changelogs/fragments/1034-document-utf8-known-issue.yml diff --git a/changelogs/fragments/1029-validate-path-join.yml b/changelogs/fragments/1029-validate-path-join.yml index 234cf3148..785c1a41b 100644 --- a/changelogs/fragments/1029-validate-path-join.yml +++ b/changelogs/fragments/1029-validate-path-join.yml @@ -1,7 +1,7 @@ minor_changes: - - zos_archive: Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_unarchive: Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - module_utils/template: Add validation into path joins to detect unauthorized path traversals. + - zos_archive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - module_utils/template - Add validation into path joins to detect unauthorized path traversals. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1029) \ No newline at end of file diff --git a/changelogs/fragments/1034-document-utf8-known-issue.yml b/changelogs/fragments/1034-document-utf8-known-issue.yml new file mode 100644 index 000000000..860fd3f6c --- /dev/null +++ b/changelogs/fragments/1034-document-utf8-known-issue.yml @@ -0,0 +1,23 @@ +known_issues: + - Several modules have reported UTF8 decoding errors when interacting with results + that contain non-printable UTF8 characters in the response. This occurs when + a module receives content that does not correspond to a UTF-8 value. + These include modules `zos_job_submit`, `zos_job_output`, + `zos_operator_action_query` but are not limited to this list. + This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. + Each case is unique, some options to work around the error are below. + - Specify that the ASA assembler option be enabled to instruct the assembler + to use ANSI control characters instead of machine code control characters. + - Add `ignore_errors:true` to the playbook task so the task error will not + fail the playbook. + - If the error is resulting from a batch job, add `ignore_errors:true` to the + task and capture the output into a variable and extract the job ID with a + regular expression and then use `zos_job_output` to display the DD without + the non-printable character such as the DD `JESMSGLG`. + (https://github.com/ansible-collections/ibm_zos_core/issues/677) + (https://github.com/ansible-collections/ibm_zos_core/issues/776) + (https://github.com/ansible-collections/ibm_zos_core/issues/972) + - With later versions of `ansible-core` used with `ibm_zos_core` collection a + warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" + that is currently being reviewed. There are no recommendations at this point. 
+ (https://github.com/ansible-collections/ibm_zos_core/issues/983) diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml index 58900fc01..1e2d3c10f 100644 --- a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml +++ b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml @@ -1,4 +1,4 @@ bugfixes: -- zos_operator: The last line of the operator was missing in the response of the module. - Fix now ensures the presence of the full output of the operator. - (https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file + - zos_operator - The last line of the operator was missing in the response of + the module. The fix now ensures the presence of the full output of the operator. + https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/changelogs/fragments/934-Remove-conditional-unnecessary.yml b/changelogs/fragments/934-Remove-conditional-unnecessary.yml index bf07c7f32..3ceeffa99 100644 --- a/changelogs/fragments/934-Remove-conditional-unnecessary.yml +++ b/changelogs/fragments/934-Remove-conditional-unnecessary.yml @@ -1,2 +1,2 @@ -- trivial: +trivial: - zos_blockinfile - remove test conditional unnecessary (https://github.com/ansible-collections/ibm_zos_core/pull/934). \ No newline at end of file diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 5a8202c34..91f920145 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,8 +1,8 @@ minor_changes: - - zos_operator: Changed system to call 'wait=true' parameter to zoau call. + - zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. 
(https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query: Add a max delay of 5 seconds on each part of the operator_action_query. + - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) diff --git a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml index c90921c9f..99a0599ec 100644 --- a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml +++ b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml @@ -1,7 +1,7 @@ bugfixes: - - zos_job_submit: Temporary files were created in tmp directory. + - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) minor_changes: - - zos_job_submit: Change action plugin call from copy to zos_copy. + - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) \ No newline at end of file diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml index aca865791..7a4ce88cb 100644 --- a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml +++ b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml @@ -1,4 +1,4 @@ bugfixes: -- zos_job_submit: The last line of the jcl was missing in the input. +- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. 
(https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file diff --git a/changelogs/fragments/963-validate-path-join.yml b/changelogs/fragments/963-validate-path-join.yml index 017c793cc..129af357e 100644 --- a/changelogs/fragments/963-validate-path-join.yml +++ b/changelogs/fragments/963-validate-path-join.yml @@ -1,5 +1,5 @@ minor_changes: - - zos_fetch: Add validation into path joins to detect unauthorized path traversals. + - zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) - - zos_copy: Add validation into path joins to detect unauthorized path traversals. + - zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) \ No newline at end of file diff --git a/changelogs/fragments/965-enhance-archive-tests.yml b/changelogs/fragments/965-enhance-archive-tests.yml index b86bf22bf..80705e4c1 100644 --- a/changelogs/fragments/965-enhance-archive-tests.yml +++ b/changelogs/fragments/965-enhance-archive-tests.yml @@ -1,5 +1,5 @@ minor_changes: - - zos_archive: Enhanced test cases to use test lines the same length of the record length. + - zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) - - zos_unarchive: Enhanced test cases to use test lines the same length of the record length. + - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. 
(https://github.com/ansible-collections/ibm_zos_core/pull/965) \ No newline at end of file From 9d3cbebc71fba42df88393ad2d1517c4671513c8 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 24 Oct 2023 09:23:15 -0700 Subject: [PATCH 211/413] [Enhancement] [zos_copy] Add support for ASA control chars (#1028) * Added ASA support when copying from datasets with control chars * Added ASA support when copying from USS to sequential data sets * Added ASA support when copying from USS to PDS/E * Re-enabled copy from datasets to USS * Added copy from non-ASA data sets to seq ASA ones * Added copy from non-ASA data sets to partitioned ones * Added allocation of ASA destination data sets * Added first version of ASA compatibility validation * Added validations for asa_text * Added asa_text documentation * Fixed record_length issue when creating ASA data sets * Fixed record_length issue * Added asa_text example * Added first test * Added DBRM doc to zos_copy module (#1025) * Added DBRM doc to zos_copy module Signed-off-by: ddimatos <dimatos@gmail.com> * Removed an extra space from doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added more tests * Added changelog fragment * Updated RST files * Removed unused import * Fixed merge * Added another validation * Updated docs * Changed use of tsocmd for mvscmd * Fixed merge error * Fixed bug with VSAM sources * Fixed bug with VSAM destinations --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> --- .../fragments/1028-asa-control-chars.yml | 4 + docs/source/modules/zos_copy.rst | 44 ++- docs/source/modules/zos_operator.rst | 19 +- docs/source/modules/zos_script.rst | 12 +- plugins/action/zos_copy.py | 9 + plugins/module_utils/copy.py | 96 ++++- plugins/module_utils/data_set.py | 8 +- plugins/modules/zos_copy.py | 301 ++++++++++++--- .../functional/modules/test_zos_copy_func.py | 344 ++++++++++++++++++ 
tests/pytest.ini | 3 +- 10 files changed, 769 insertions(+), 71 deletions(-) create mode 100644 changelogs/fragments/1028-asa-control-chars.yml diff --git a/changelogs/fragments/1028-asa-control-chars.yml b/changelogs/fragments/1028-asa-control-chars.yml new file mode 100644 index 000000000..6afc35e50 --- /dev/null +++ b/changelogs/fragments/1028-asa-control-chars.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_copy: add support in zos_copy for text files and data sets containing ASA + control characters. + (https://github.com/ansible-collections/ibm_zos_core/pull/1028) \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 191570bae..e19332bf4 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -26,6 +26,23 @@ Parameters ---------- +asa_text + If set to ``true``, indicates that either ``src`` or ``dest`` or both contain ASA control characters. + + When ``src`` is a USS file and ``dest`` is a data set, the copy will preserve ASA control characters in the destination. + + When ``src`` is a data set containing ASA control characters and ``dest`` is a USS file, the copy will put all control characters as plain text in the destination. + + If ``dest`` is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). + + If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. + + This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + + | **required**: False + | **type**: bool + + backup Specifies whether a backup of the destination should be created before copying data. @@ -140,6 +157,19 @@ force | **type**: bool +force_lock + By default, when c(dest) is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. 
Use ``force_lock`` to bypass this check and continue with copy. + + If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. + + Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. + + If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. + + | **required**: False + | **type**: bool + + ignore_sftp_stderr During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. @@ -150,7 +180,11 @@ ignore_sftp_stderr is_binary - If set to ``true``, indicates that the file or data set to be copied is a binary file/data set. + If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. + + When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. + + Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -706,7 +740,7 @@ Examples executable: true aliases: true - - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE + - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE zos_copy: src: '/home/loadlib/' dest: HLQ.LOADLIB.NEW @@ -714,6 +748,12 @@ Examples executable: true aliases: true + - name: Copy a file with ASA characters to a new sequential data set. 
+ zos_copy: + src: ./files/print.txt + dest: HLQ.PRINT.NEW + asa_text: true + diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index b05b0331a..e0f65414f 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -58,18 +58,6 @@ wait_time_s | **default**: 1 -wait - Configuring wait used by the `zos_operator <./zos_operator.html>`_ module has been deprecated and will be removed in a future ibm.ibm_zos_core collection. - - Setting this option will yield no change, it is deprecated. - - Review option *wait_time_s* to instruct operator commands to wait. - - | **required**: False - | **type**: bool - | **default**: True - - Examples @@ -91,16 +79,11 @@ Examples zos_operator: cmd: "\\$PJ(*)" - - name: Execute operator command to show jobs, waiting up to 5 seconds for response + - name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 5 - - name: Execute operator command to show jobs, always waiting 7 seconds for response - zos_operator: - cmd: 'd a,all' - wait_time_s: 7 - - name: Display the system symbols and associated substitution texts. zos_operator: cmd: 'D SYMBOLS' diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 6fc9a0ece..29d9bb2df 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -103,12 +103,14 @@ removes tmp_path - Path in the remote machine where local scripts will be temporarily copied to. + Directory path in the remote machine where local scripts will be temporarily copied to. When not specified, the module will copy local scripts to the default temporary path for the user. If ``tmp_path`` does not exist in the remote machine, the module will not create it. + All scripts copied to ``tmp_path`` will be removed from the managed node before the module finishes executing. 
+ | **required**: False | **type**: str @@ -292,7 +294,9 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. - Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for the script will be restored by the module before the task ends. + Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for remote scripts will be restored by the module before the task ends. + + The module will only add execution permissions for the file owner. If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. @@ -302,7 +306,9 @@ Notes `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + + The community script module does not rely on Python to execute scripts on a managed node, while this module does. Python must be present on the remote machine. 
diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index d65c4a468..d7d00eb64 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -61,6 +61,7 @@ def run(self, tmp=None, task_vars=None): is_binary = _process_boolean(task_args.get('is_binary'), default=False) force_lock = _process_boolean(task_args.get('force_lock'), default=False) executable = _process_boolean(task_args.get('executable'), default=False) + asa_text = _process_boolean(task_args.get('asa_text'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) backup_name = task_args.get("backup_name", None) encoding = task_args.get("encoding", None) @@ -117,6 +118,14 @@ def run(self, tmp=None, task_vars=None): msg = "Backup file provided but 'backup' parameter is False" return self._exit_action(result, msg, failed=True) + if is_binary and asa_text: + msg = "Both 'is_binary' and 'asa_text' are True. Unable to copy binary data as an ASA text file." + return self._exit_action(result, msg, failed=True) + + if executable and asa_text: + msg = "Both 'executable' and 'asa_text' are True. Unable to copy an executable as an ASA text file." + return self._exit_action(result, msg, failed=True) + use_template = _process_boolean(task_args.get("use_template"), default=False) if remote_src and use_template: msg = "Use of Jinja2 templates is only valid for local files, remote_src cannot be set to true." 
diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 7edd8a49c..ac9e74758 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -22,6 +22,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( + ikjeft01 +) if PY3: from shlex import quote @@ -110,7 +113,7 @@ def copy_ps2uss(src, dest, is_binary=False): return rc, out, err -def copy_pds2uss(src, dest, is_binary=False): +def copy_pds2uss(src, dest, is_binary=False, asa_text=False): """Copy the whole PDS(E) to a uss path Arguments: @@ -119,6 +122,8 @@ def copy_pds2uss(src, dest, is_binary=False): Keyword Arguments: is_binary: {bool} -- Whether the file to be copied contains binary data + asa_text: {bool} -- Whether the file to be copied contains ASA control + characters Raises: USSCmdExecError: When any exception is raised during the conversion. @@ -130,12 +135,22 @@ def copy_pds2uss(src, dest, is_binary=False): module = AnsibleModuleHelper(argument_spec={}) src = _validate_data_set_name(src) dest = _validate_path(dest) + cp_pds2uss = "cp -U -F rec \"//'{0}'\" {1}".format(src, quote(dest)) - if is_binary: + + # When dealing with ASA control chars, each record follows a + # different format than what '-F rec' means, so we remove it + # to allow the system to leave the control chars in the + # destination. + if asa_text: + cp_pds2uss = cp_pds2uss.replace("-F rec", "", 1) + elif is_binary: cp_pds2uss = cp_pds2uss.replace("rec", "bin", 1) + rc, out, err = module.run_command(cp_pds2uss) if rc: raise USSCmdExecError(cp_pds2uss, rc, out, err) + return rc, out, err @@ -216,6 +231,83 @@ def copy_vsam_ps(src, dest): return rc, out, err +def copy_asa_uss2mvs(src, dest): + """Copy a file from USS to an ASA sequential data set or PDS/E member. 
+ + Arguments: + src: {str} -- Path of the USS file + dest: {str} -- The MVS destination data set or member + + Returns: + boolean -- The return code after the copy command executed successfully + str -- The stdout after the copy command executed successfully + str -- The stderr after the copy command executed successfully + """ + oget_cmd = "OGET '{0}' '{1}'".format(src, dest) + rc, out, err = ikjeft01(oget_cmd, authorized=True) + + return TSOCmdResponse(rc, out, err) + + +def copy_asa_mvs2uss(src, dest): + """Copy an ASA sequential data set or member to USS. + + Arguments: + src: {str} -- The MVS data set to be copied + dest: {str} -- Destination path in USS + + Returns: + boolean -- The return code after the copy command executed successfully + str -- The stdout after the copy command executed successfully + str -- The stderr after the copy command executed successfully + """ + src = _validate_data_set_name(src) + dest = _validate_path(dest) + + oput_cmd = "OPUT '{0}' '{1}'".format(src, dest) + rc, out, err = ikjeft01(oput_cmd, authorized=True) + + return TSOCmdResponse(rc, out, err) + + +def copy_asa_pds2uss(src, dest): + """Copy all members from an ASA PDS/E to USS. 
+ + Arguments: + src: {str} -- The MVS data set to be copied + dest: {str} -- Destination path in USS (must be a directory) + + Returns: + boolean -- The return code after the copy command executed successfully + str -- The stdout after the copy command executed successfully + str -- The stderr after the copy command executed successfully + """ + from os import path + from zoautil_py import datasets + + src = _validate_data_set_name(src) + dest = _validate_path(dest) + + for member in datasets.list_members(src): + src_member = '{0}({1})'.format(src, member) + dest_path = path.join(dest, member) + + oput_cmd = "OPUT '{0}' '{1}'".format(src_member, dest_path) + rc, out, err = ikjeft01(oput_cmd, authorized=True) + + if rc != 0: + return TSOCmdResponse(rc, out, err) + + return TSOCmdResponse(0, '', '') + + +class TSOCmdResponse(): + def __init__(self, rc, stdout, stderr): + self.rc = rc + self.stdout_response = stdout + self.stderr_response = stderr + + class USSCmdExecError(Exception): def __init__(self, uss_cmd, rc, out, err): self.msg = ( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 69e1190f1..cbeb7eb7d 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -278,7 +278,7 @@ def ensure_uncataloged(name): return False @staticmethod - def allocate_model_data_set(ds_name, model, vol=None): + def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): """Allocates a data set based on the attributes of a 'model' data set. Useful when a data set needs to be created identical to another. Supported model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), @@ -291,6 +291,8 @@ def allocate_model_data_set(ds_name, model, vol=None): must be used. 
See extract_dsname(ds_name) in data_set.py model {str} -- The name of the data set whose allocation parameters should be used to allocate the new data set 'ds_name' + asa_text {bool} -- Whether the new data set should support ASA control + characters (have record format FBA) vol {str} -- The volume where data set should be allocated Raise: @@ -321,6 +323,10 @@ def allocate_model_data_set(ds_name, model, vol=None): alloc_cmd = """{0} - VOLUME({1})""".format(alloc_cmd, vol.upper()) + if asa_text: + alloc_cmd = """{0} - + RECFM(F,B,A)""".format(alloc_cmd) + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True) if rc != 0: raise MVSCmdExecError(rc, out, err) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index c671d87a0..9bafdc471 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -30,6 +30,25 @@ - "Demetrios Dimatos (@ddimatos)" - "Ivan Moreno (@rexemin)" options: + asa_text: + description: + - If set to C(true), indicates that either C(src) or C(dest) or both + contain ASA control characters. + - When C(src) is a USS file and C(dest) is a data set, the copy will + preserve ASA control characters in the destination. + - When C(src) is a data set containing ASA control characters and + C(dest) is a USS file, the copy will put all control characters as + plain text in the destination. + - If C(dest) is a non-existent data set, it will be created with record + format Fixed Block with ANSI format (FBA). + - If neither C(src) or C(dest) have record format Fixed Block with ANSI + format (FBA) or Variable Block with ANSI format (VBA), the module + will fail. + - This option is only valid for text files. If C(is_binary) is C(true) + or C(executable) is C(true) as well, the module will fail. 
+ type: bool + default: false + required: false backup: description: - Specifies whether a backup of the destination should be created before @@ -185,7 +204,11 @@ is_binary: description: - If set to C(true), indicates that the file or data set to be copied is a - binary file/data set. + binary file or data set. + - When I(is_binary=true), no encoding conversion is applied to the content, + all content transferred retains the original state. + - Use I(is_binary=true) when copying a Database Request Module (DBRM) to + retain the original state of the serialized SQL statements of a program. type: bool default: false required: false @@ -616,6 +639,12 @@ remote_src: true executable: true aliases: true + +- name: Copy a file with ASA characters to a new sequential data set. + zos_copy: + src: ./files/print.txt + dest: HLQ.PRINT.NEW + asa_text: true """ RETURN = r""" @@ -815,6 +844,7 @@ def __init__( is_binary=False, executable=False, aliases=False, + asa_text=False, backup_name=None, force_lock=False, ): @@ -838,6 +868,7 @@ def __init__( self.module = module self.is_binary = is_binary self.executable = executable + self.asa_text = asa_text self.aliases = aliases self.backup_name = backup_name self.force_lock = force_lock @@ -851,7 +882,8 @@ def copy_to_seq( src, temp_path, conv_path, - dest + dest, + src_type ): """Copy source to a sequential data set. @@ -864,18 +896,24 @@ def copy_to_seq( transferred data to conv_path {str} -- Path to the converted source file dest {str} -- Name of destination data set + src_type {str} -- Type of the source """ new_src = conv_path or temp_path or src copy_args = dict() copy_args["options"] = "" - if self.is_binary: - copy_args["options"] = "-B" + if src_type == 'USS' and self.asa_text: + response = copy.copy_asa_uss2mvs(new_src, dest) + else: + # While ASA files are just text files, we do a binary copy + # so dcp doesn't introduce any additional blanks or newlines. 
+ if self.is_binary or self.asa_text: + copy_args["options"] = "-B" - if self.force_lock: - copy_args["options"] += " -f" + if self.force_lock: + copy_args["options"] += " -f" - response = datasets._copy(new_src, dest, None, **copy_args) + response = datasets._copy(new_src, dest, None, **copy_args) if response.rc != 0: raise CopyOperationError( msg="Unable to copy source {0} to {1}".format(new_src, dest), @@ -1118,6 +1156,7 @@ def __init__( module, is_binary=False, executable=False, + asa_text=False, aliases=False, common_file_args=None, backup_name=None, @@ -1136,7 +1175,12 @@ def __init__( backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( - module, is_binary=is_binary, executable=executable, aliases=aliases, backup_name=backup_name + module, + is_binary=is_binary, + executable=executable, + asa_text=asa_text, + aliases=aliases, + backup_name=backup_name ) self.common_file_args = common_file_args @@ -1162,11 +1206,13 @@ def copy_to_uss( src_ds_type {str} -- Type of source src_member {bool} -- Whether src is a data set member member_name {str} -- The name of the source data set member - force {bool} -- Wheter to copy files to an already existing directory + force {bool} -- Whether to copy files to an already existing directory Returns: {str} -- Destination where the file was copied to """ + changed_files = None + if src_ds_type in data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED): self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name @@ -1434,10 +1480,13 @@ def _mvs_copy_to_uss( try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: - if self.executable: + if self.asa_text: + response = copy.copy_asa_mvs2uss(src, dest) + elif self.executable: response = datasets._copy(src, dest, None, **opts) else: response = datasets._copy(src, dest) + if response.rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), @@ -1448,6 +1497,17 @@ 
def _mvs_copy_to_uss( else: if self.executable: response = datasets._copy(src, dest, None, **opts) + + if response.rc != 0: + raise CopyOperationError( + msg="Error while copying source {0} to {1}".format(src, dest), + rc=response.rc, + stdout=response.stdout_response, + stderr=response.stderr_response + ) + elif self.asa_text: + response = copy.copy_asa_pds2uss(src, dest) + if response.rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), @@ -1456,7 +1516,14 @@ def _mvs_copy_to_uss( stderr=response.stderr_response ) else: - copy.copy_pds2uss(src, dest, is_binary=self.is_binary) + copy.copy_pds2uss( + src, + dest, + is_binary=self.is_binary, + asa_text=self.asa_text + ) + except CopyOperationError as err: + raise err except Exception as err: raise CopyOperationError(msg=str(err)) @@ -1468,6 +1535,7 @@ def __init__( is_binary=False, executable=False, aliases=False, + asa_text=False, backup_name=None, force_lock=False, ): @@ -1488,8 +1556,9 @@ def __init__( is_binary=is_binary, executable=executable, aliases=aliases, + asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock, + force_lock=force_lock ) def copy_to_pdse( @@ -1578,7 +1647,11 @@ def copy_to_pdse( else: new_members.append(destination_member) - result = self.copy_to_member(src_member, "{0}({1})".format(dest, destination_member)) + result = self.copy_to_member( + src_member, + "{0}({1})".format(dest, destination_member), + src_ds_type + ) if result["rc"] != 0: msg = "Unable to copy source {0} to data set member {1}({2})".format( @@ -1598,7 +1671,8 @@ def copy_to_pdse( def copy_to_member( self, src, - dest + dest, + src_type ): """Copy source to a PDS/PDSE member. The only valid sources are: - USS files @@ -1608,6 +1682,7 @@ def copy_to_member( Arguments: src {str} -- Path to USS file or data set name. dest {str} -- Name of destination data set + src_type {str} -- Type of the source. 
Returns: dict -- Dictionary containing the return code, stdout, and stderr from @@ -1618,22 +1693,27 @@ def copy_to_member( opts = dict() opts["options"] = "" - if self.is_binary: - opts["options"] = "-B" + if src_type == 'USS' and self.asa_text: + response = copy.copy_asa_uss2mvs(src, dest) + else: + # While ASA files are just text files, we do a binary copy + # so dcp doesn't introduce any additional blanks or newlines. + if self.is_binary or self.asa_text: + opts["options"] = "-B" - if self.aliases and not self.executable: - # lower case 'i' for text-based copy (dcp) - opts["options"] = "-i" + if self.aliases and not self.executable: + # lower case 'i' for text-based copy (dcp) + opts["options"] = "-i" - if self.executable: - opts["options"] = "-X" - if self.aliases: - opts["options"] = "-IX" + if self.executable: + opts["options"] = "-X" + if self.aliases: + opts["options"] = "-IX" - if self.force_lock: - opts["options"] += " -f" + if self.force_lock: + opts["options"] += " -f" - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, None, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response return dict( @@ -1702,6 +1782,7 @@ def get_data_set_attributes( name, size, is_binary, + asa_text=False, record_format=None, record_length=None, type="SEQ", @@ -1725,6 +1806,7 @@ def get_data_set_attributes( name (str) -- Name of the new sequential data set. size (int) -- Number of bytes needed for the new data set. is_binary (bool) -- Whether or not the data set will have binary data. + asa_text (bool) -- Whether the data set will have ASA control characters. record_format (str, optional) -- Type of record format. record_length (int, optional) -- Record length for the data set. type (str, optional) -- Type of the new data set. 
@@ -1761,6 +1843,10 @@ def get_data_set_attributes( else: block_size = max_block_size + if asa_text: + record_format = "FBA" + block_size = 27920 + parms = dict( name=name, type=type, @@ -1783,6 +1869,8 @@ def create_seq_dataset_from_file( dest, force, is_binary, + asa_text, + record_length=None, volume=None ): """Creates a new sequential dataset with attributes suitable to copy the @@ -1793,21 +1881,37 @@ def create_seq_dataset_from_file( dest (str) -- Name of the data set. force (bool) -- Whether to replace an existing data set. is_binary (bool) -- Whether the file has binary data. + asa_text (bool) -- Whether the file has ASA control characters. volume (str, optional) -- Volume where the data set should be. """ src_size = os.stat(file).st_size - record_format = record_length = None + # record_format = record_length = None + record_format = None + # When dealing with ASA files, if copying from USS, + # the record length will need to be adjusted (we know it + # comes from USS because those flows don't send a + # value for record_length, while flows from source data + # sets do). + adjust_record_format = False # When src is a binary file, the module will use default attributes # for the data set, such as a record format of "VB". if not is_binary: record_format = "FB" - record_length = get_file_record_length(file) + if not record_length: + record_length = get_file_record_length(file) + adjust_record_format = True + + if asa_text and adjust_record_format: + # Adding one byte more to the record length to account for the + # control character at the start of each line. 
+ record_length += 1 dest_params = get_data_set_attributes( name=dest, size=src_size, is_binary=is_binary, + asa_text=asa_text, record_format=record_format, record_length=record_length, volume=volume @@ -1849,7 +1953,10 @@ def is_compatible( src_member, is_src_dir, is_src_inline, - executable + executable, + asa_text, + src_has_asa_chars, + dest_has_asa_chars ): """Determine whether the src and dest are compatible and src can be copied to dest. @@ -1862,6 +1969,9 @@ def is_compatible( is_src_dir {bool} -- Whether the src is a USS directory. is_src_inline {bool} -- Whether the src comes from inline content. executable {bool} -- Whether the src is a executable to be copied. + asa_text {bool} -- Whether the copy operation will handle ASA control characters. + src_has_asa_chars {bool} -- Whether the src contains ASA control characters. + dest_has_asa_chars {bool} -- Whether the dest contains ASA control characters. Returns: {bool} -- Whether src can be copied to dest. @@ -1882,6 +1992,13 @@ def is_compatible( if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: return False + # ******************************************************************** + # For copy operations involving ASA control characters, at least one + # of the files/data sets has got to have ASA characters. + # ******************************************************************** + if asa_text: + return src_has_asa_chars or dest_has_asa_chars + # ******************************************************************** # If source is a sequential data set, then destination must be # partitioned data set member, other sequential data sets or USS files. @@ -2076,6 +2193,7 @@ def get_attributes_of_any_dataset_created( src, src_name, is_binary, + asa_text, volume=None ): """ @@ -2088,6 +2206,7 @@ def get_attributes_of_any_dataset_created( src (str) -- Name of the source data set, used as a model when appropiate. 
src_name (str) -- Extraction of the source name without the member pattern. is_binary (bool) -- Whether the data set will contain binary data. + asa_text (bool) -- Whether the data set will contain ASA control characters. volume (str, optional) -- Volume where the data set should be allocated into. Returns: @@ -2098,14 +2217,32 @@ def get_attributes_of_any_dataset_created( if src_ds_type == "USS": if os.path.isfile(src): size = os.stat(src).st_size - params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + params = get_data_set_attributes( + dest, + size=size, + is_binary=is_binary, + asa_text=asa_text, + volume=volume + ) else: size = os.path.getsize(src) - params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + params = get_data_set_attributes( + dest, + size=size, + is_binary=is_binary, + asa_text=asa_text, + volume=volume + ) else: src_attributes = datasets.listing(src_name)[0] size = int(src_attributes.total_space) - params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + params = get_data_set_attributes( + dest, + size=size, + is_binary=is_binary, + asa_text=asa_text, + volume=volume + ) return params @@ -2118,6 +2255,7 @@ def allocate_destination_data_set( force, is_binary, executable, + asa_text, dest_data_set=None, volume=None ): @@ -2134,6 +2272,7 @@ def allocate_destination_data_set( force (bool) -- Whether to replace an existent data set. is_binary (bool) -- Whether the data set will contain binary data. executable (bool) -- Whether the data to copy is an executable dataset or file. + asa_text (bool) -- Whether the data to copy has ASA control characters. dest_data_set (dict, optional) -- Parameters containing a full definition of the new data set; they will take precedence over any other allocation logic. volume (str, optional) -- Volume where the data set should be allocated into. 
@@ -2169,16 +2308,26 @@ def allocate_destination_data_set( if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, force, is_binary, volume=volume) + create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: temp_dump = None try: # Dumping the member into a file in USS to compute the record length and # size for the new data set. + src_attributes = datasets.listing(src_name)[0] + record_length = int(src_attributes.lrecl) temp_dump = dump_data_set_member_to_file(src, is_binary) - create_seq_dataset_from_file(temp_dump, dest, force, is_binary, volume=volume) + create_seq_dataset_from_file( + temp_dump, + dest, + force, + is_binary, + asa_text, + record_length=record_length, + volume=volume + ) finally: if temp_dump: os.remove(temp_dump) @@ -2195,6 +2344,7 @@ def allocate_destination_data_set( dest, size, is_binary, + asa_text, record_format=record_format, record_length=record_length, type="LIBRARY", @@ -2202,15 +2352,23 @@ def allocate_destination_data_set( ) data_set.DataSet.ensure_present(replace=force, **dest_params) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.listing(src_name)[0] # The size returned by listing is in bytes. 
size = int(src_attributes.total_space) record_format = src_attributes.recfm record_length = int(src_attributes.lrecl) - dest_params = get_data_set_attributes(dest, size, is_binary, record_format=record_format, record_length=record_length, type="PDSE", - volume=volume) + dest_params = get_data_set_attributes( + dest, + size, + is_binary, + asa_text, + record_format=record_format, + record_length=record_length, + type="PDSE", + volume=volume + ) data_set.DataSet.ensure_present(replace=force, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): @@ -2226,6 +2384,11 @@ def allocate_destination_data_set( record_format = "FB" record_length = get_file_record_length(src) + # Adding 1 byte to the record length to accommodate + # ASA control chars. + if asa_text: + record_length += 1 + if executable: record_format = "U" record_length = 0 @@ -2235,6 +2398,7 @@ def allocate_destination_data_set( dest, size, is_binary, + asa_text, record_format=record_format, record_length=record_length, type=type_ds, @@ -2254,7 +2418,14 @@ def allocate_destination_data_set( volume=volume ) else: - dest_params = get_data_set_attributes(dest, size, is_binary, type="PDSE", volume=volume) + dest_params = get_data_set_attributes( + dest, + size, + is_binary, + asa_text, + type="PDSE", + volume=volume + ) data_set.DataSet.ensure_present(replace=force, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: @@ -2264,7 +2435,15 @@ def allocate_destination_data_set( data_set.DataSet.ensure_absent(dest, volumes=volumes) data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) if dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_params = get_attributes_of_any_dataset_created(dest, src_ds_type, src, src_name, is_binary, volume) + dest_params = get_attributes_of_any_dataset_created( + dest, + src_ds_type, + src, + src_name, + is_binary, + asa_text, + volume + ) dest_attributes = datasets.listing(dest)[0] record_format = dest_attributes.recfm 
dest_params["type"] = dest_ds_type @@ -2371,6 +2550,7 @@ def run_module(module, arg_def): remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') executable = module.params.get('executable') + asa_text = module.params.get('asa_text') aliases = module.params.get('aliases') backup = module.params.get('backup') backup_name = module.params.get('backup_name') @@ -2436,6 +2616,10 @@ def run_module(module, arg_def): # ******************************************************************** dest_member_exists = False converted_src = None + # By default, we'll assume that src and dest don't have ASA control + # characters. We'll only update these variables when they are + # data sets with record format 'FBA' or 'VBA'. + src_has_asa_chars = dest_has_asa_chars = False try: # If temp_path, the plugin has copied a file from the controller to USS. if temp_path or "/" in src: @@ -2481,6 +2665,10 @@ def run_module(module, arg_def): raise NonExistentSourceError(src) src_ds_type = data_set.DataSet.data_set_type(src_name) + if src_ds_type not in data_set.DataSet.MVS_VSAM: + src_attributes = datasets.listing(src_name)[0] + if src_attributes.recfm == 'FBA' or src_attributes.recfm == 'VBA': + src_has_asa_chars = True else: raise NonExistentSourceError(src) @@ -2522,6 +2710,15 @@ def run_module(module, arg_def): if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type") + if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): + dest_has_asa_chars = True + elif not dest_exists and asa_text: + dest_has_asa_chars = True + elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: + dest_attributes = datasets.listing(dest_name)[0] + if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_has_asa_chars = True + if dest_ds_type in data_set.DataSet.MVS_PARTITIONED: # Checking if the members that would be created from the directory files # are 
already present on the system. @@ -2554,12 +2751,20 @@ def run_module(module, arg_def): src_member, is_src_dir, (src_ds_type == "USS" and src is None), - executable + executable, + asa_text, + src_has_asa_chars, + dest_has_asa_chars ): + error_msg = "Incompatible target type '{0}' for source '{1}'".format( + dest_ds_type, src_ds_type + ) + + if asa_text: + error_msg = "{0}. Neither the source or the destination are ASA text files.".format(error_msg) + module.fail_json( - msg="Incompatible target type '{0}' for source '{1}'".format( - dest_ds_type, src_ds_type - ) + msg=error_msg ) # ******************************************************************** @@ -2683,6 +2888,7 @@ def run_module(module, arg_def): force, is_binary, executable, + asa_text, dest_data_set=dest_data_set, volume=volume ) @@ -2711,6 +2917,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + asa_text=asa_text, backup_name=backup_name, force_lock=force_lock, ) @@ -2731,6 +2938,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + asa_text=asa_text, aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, @@ -2774,6 +2982,7 @@ def run_module(module, arg_def): # Copy to sequential data set (PS / SEQ) # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_SEQ: + # TODO: check how ASA behaves with this if src_ds_type == "USS" and not is_binary: new_src = conv_path or temp_path or src conv_path = normalize_line_endings(new_src, encoding) @@ -2783,6 +2992,7 @@ def run_module(module, arg_def): temp_path, conv_path, dest, + src_ds_type ) res_args["changed"] = True dest = dest.upper() @@ -2798,6 +3008,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + asa_text=asa_text, aliases=aliases, backup_name=backup_name, force_lock=force_lock, @@ -2846,6 +3057,7 @@ def main(): 
dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), + asa_text=dict(type='bool', default=False), aliases=dict(type='bool', default=False, required=False), encoding=dict( type='dict', @@ -2949,6 +3161,7 @@ def main(): dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), executable=dict(arg_type='bool', required=False, default=False), + asa_text=dict(arg_type='bool', required=False, default=False), aliases=dict(arg_type='bool', required=False, default=False), content=dict(arg_type='str', required=False), backup=dict(arg_type='bool', default=False, required=False), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 1fa6397e2..b42dd9500 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -87,6 +87,29 @@ {% endfor %} """ +# Text that will be used for the ASA control chars tests. +# It contains at least one instance of each control char. +ASA_SAMPLE_CONTENT = """ Space, do not advance. +0Newline before printing this line. + This line is not going to be seen. ++This line will overwrite the previous one. + This line will be partially seen because it will be longer than the next line. ++This line will partially overwrite the previous line. +-Three newlines before this one. +1This is a new page. +""" + +ASA_SAMPLE_RETURN = "\nSpace, do not advance.\n\nNewline before printing this line.\nThis line is not going to be seen.\rThis line will overwrite the previous one.\nThis line will be partially seen because it will be longer than the next line.\rThis line will partially overwrite the previous line.\n\n\nThree newlines before this one.\fThis is a new page." + +ASA_COPY_CONTENT = """ Space, do not advance. + 0Newline before printing this line. + This line is not going to be seen. 
+ +This line will overwrite the previous one. + This line will be partially seen because it will be longer than the next line. + +This line will partially overwrite the previous line. + -Three newlines before this one. + 1This is a new page.""" + # SHELL_EXECUTABLE = "/usr/lpp/rsusr/ported/bin/bash" SHELL_EXECUTABLE = "/bin/sh" TEST_PS = "IMSTESTL.IMS01.DDCHKPT" @@ -1580,6 +1603,327 @@ def test_copy_template_file_to_dataset(ansible_zos_module): shutil.rmtree(temp_dir) +@pytest.mark.uss +@pytest.mark.seq +@pytest.mark.asa +def test_copy_asa_file_to_asa_sequential(ansible_zos_module): + hosts = ansible_zos_module + + try: + dest = "USER.ASA.SEQ" + hosts.all.zos_data_set(name=dest, state="absent") + + copy_result = hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=dest, + remote_src=False, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.uss +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_asa_file_to_asa_partitioned(ansible_zos_module): + hosts = ansible_zos_module + + try: + dest = "USER.ASA.PDSE" + hosts.all.zos_data_set(name=dest, state="absent") + full_dest = "{0}(TEST)".format(dest) + + copy_result = hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=full_dest, + remote_src=False, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(full_dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == 
full_dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.asa +def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.SEQ" + hosts.all.zos_data_set( + name=src, + state="present", + type="seq", + replace=True + ) + + dest = "USER.ASA.SEQ" + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.SEQ" + hosts.all.zos_data_set( + name=src, + state="present", + type="seq", + replace=True + ) + + dest = "USER.ASA.PDSE" + full_dest = "{0}(MEMBER)".format(dest) + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=src, + dest=full_dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(full_dest), + 
executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == full_dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.PDSE" + full_src = "{0}(MEMBER)".format(src) + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + replace=True + ) + + dest = "USER.ASA.SEQ" + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=full_src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=full_src, + dest=dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.PDSE" + full_src = "{0}(MEMBER)".format(src) + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + replace=True + ) + + dest = "USER.ASA.PDSE" + full_dest = 
"{0}(MEMBER)".format(dest) + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=full_src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=full_src, + dest=full_dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(full_dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == full_dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.uss +@pytest.mark.seq +@pytest.mark.asa +def test_copy_asa_data_set_to_text_file(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.ASA.SRC" + hosts.all.zos_data_set( + name=src, + state="present", + type="seq", + record_format="FBA", + record_length=80, + block_size=27920, + replace=True + ) + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=src, + remote_src=False + ) + + dest = "/tmp/zos_copy_asa_test.txt" + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat {0}".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Since OPUT preserves all blank spaces associated + # with a record, we strip them before comparing to + # what we expect. 
+ for cp_line, content_line in zip(v_cp.get("stdout_lines"), ASA_COPY_CONTENT.splitlines()): + assert cp_line.rstrip() == content_line + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.file(path=dest, state="absent") + + @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_remote=False), dict(src="/etc/profile", is_remote=True),]) diff --git a/tests/pytest.ini b/tests/pytest.ini index 4226de838..cd4b8b3f6 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -10,4 +10,5 @@ markers = vsam: VSAM data sets test cases. template: Jinja2 templating test cases. aliases: aliases option test cases. - loadlib: executable copy test cases. \ No newline at end of file + loadlib: executable copy test cases. + asa: ASA text files test cases. \ No newline at end of file From b30f892a8b6031369985df5ff348879cf6c2a1ed Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 31 Oct 2023 13:02:56 -0600 Subject: [PATCH 212/413] Merge staging-v1.8.0-beta.1 back to dev (#1040) * Resolved merge conflicts from cherrypick * Delete changelogs --- CHANGELOG.rst | 50 +++++++++ README.md | 3 +- changelogs/changelog.yaml | 104 ++++++++++++++++++ ...4-zos-copy-add-data-set-member-aliases.yml | 5 - ...18-internal-consolidate-version-checks.yml | 9 -- .../fragments/1028-asa-control-chars.yml | 4 - .../fragments/1029-validate-path-join.yml | 7 -- .../1034-document-utf8-known-issue.yml | 23 ---- ...load_module_and_program_object_support.yml | 6 - ...n_zos_blockinfile_and_set_json_as_true.yml | 2 - ...s-lineinfile-does-not-behave-community.yml | 4 - ...-operator-response-come-back-truncate.yaml | 4 - .../920-zos-copy-add-library-choice.yml | 4 - .../934-Remove-conditional-unnecessary.yml | 2 - ...nhance-Add-wait-zos-operator-and-query.yml | 8 -- ...or-zos-copy-and-remove-temporary-files.yml | 7 -- ...os-job-submit-truncate-final-character.yml | 4 - .../fragments/959-ac-tool-update-mounts.yml | 3 - .../fragments/963-validate-path-join.yml 
| 5 - ...odify-get_data_set_attributes-function.yml | 3 - .../fragments/965-enhance-archive-tests.yml | 5 - .../966-ac-tool-add-python-311-3.yml | 3 - .../969-Simplify_loadlib_test_cases.yml | 3 - .../fragments/980-zos-copy-disp-shr.yml | 5 - docs/source/modules/zos_copy.rst | 6 +- docs/source/release_notes.rst | 62 +++++++++++ galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_job_submit.py | 2 +- plugins/module_utils/mvs_cmd.py | 2 +- plugins/modules/zos_copy.py | 6 +- plugins/modules/zos_operator_action_query.py | 2 +- .../functional/modules/test_zos_find_func.py | 23 ++-- .../modules/test_zos_job_output_func.py | 2 +- .../test_zos_operator_action_query_func.py | 2 +- .../modules/test_zos_operator_func.py | 2 +- 36 files changed, 247 insertions(+), 139 deletions(-) delete mode 100644 changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml delete mode 100644 changelogs/fragments/1018-internal-consolidate-version-checks.yml delete mode 100644 changelogs/fragments/1028-asa-control-chars.yml delete mode 100644 changelogs/fragments/1029-validate-path-join.yml delete mode 100644 changelogs/fragments/1034-document-utf8-known-issue.yml delete mode 100644 changelogs/fragments/804-improved_load_module_and_program_object_support.yml delete mode 100644 changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml delete mode 100644 changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml delete mode 100644 changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml delete mode 100644 changelogs/fragments/920-zos-copy-add-library-choice.yml delete mode 100644 changelogs/fragments/934-Remove-conditional-unnecessary.yml delete mode 100644 changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml delete mode 100644 changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml delete mode 100644 changelogs/fragments/952-zos-job-submit-truncate-final-character.yml 
delete mode 100644 changelogs/fragments/959-ac-tool-update-mounts.yml delete mode 100644 changelogs/fragments/963-validate-path-join.yml delete mode 100644 changelogs/fragments/964-modify-get_data_set_attributes-function.yml delete mode 100644 changelogs/fragments/965-enhance-archive-tests.yml delete mode 100644 changelogs/fragments/966-ac-tool-add-python-311-3.yml delete mode 100644 changelogs/fragments/969-Simplify_loadlib_test_cases.yml delete mode 100644 changelogs/fragments/980-zos-copy-disp-shr.yml diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a7c787d05..2c2815de4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,56 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.8.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2023-10-24' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) +- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_copy - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). 
User needs to use with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). +- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) +- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) +- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) +- zos_copy - add support in zos_copy for text files and data sets containing ASA control characters. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1028) + +Bugfixes +-------- + +- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). +- zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) +- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) +- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) + +Known Issues +------------ + +- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. 
- If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972) +- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + +New Modules +----------- + +- ibm.ibm_zos_core.zos_script - Run scripts in z/OS + v1.7.0 ====== diff --git a/README.md b/README.md index 13f45889f..947740ad5 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,8 @@ querying operator actions, APF authorizing libraries, editing textual data in data sets or Unix System Services files, finding data sets, backing up and restoring data sets and volumes, mounting file systems, running z/OS programs without JCL, -initializing volumes, archiving, unarchiving and templating with Jinja. +running local and remote scripts on z/OS, initializing volumes, +archiving, unarchiving and templating with Jinja. Red Hat Ansible Certified Content for IBM Z diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 6988760f9..2e50559d7 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1023,3 +1023,107 @@ releases: - 930-archive-post-beta.yml - v1.7.0-beta.2_summary.yml release_date: '2023-08-21' + 1.8.0-beta.1: + changes: + bugfixes: + - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption + value. Documentation updated to reflect this change. 
(https://github.com/ansible-collections/ibm_zos_core/pull/968). + - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures + the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) + - zos_job_submit - The last line of the jcl was missing in the input. Fix now + ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) + - zos_lineinfile - A duplicate entry was made even if line was already present + in the target file. Fix now prevents a duplicate entry if the line already + exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) + - zos_operator - The last line of the operator was missing in the response of + the module. The fix now ensures the presence of the full output of the operator. + https://github.com/ansible-collections/ibm_zos_core/pull/918) + deprecated_features: + - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). + known_issues: + - Several modules have reported UTF8 decoding errors when interacting with results + that contain non-printable UTF8 characters in the response. This occurs when + a module receives content that does not correspond to a UTF-8 value. These + include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` + but are not limited to this list. This will be addressed in `ibm_zos_core` + version 1.10.0-beta.1. Each case is unique, some options to work around the + error are below. - Specify that the ASA assembler option be enabled to instruct + the assembler to use ANSI control characters instead of machine code control + characters. - Add `ignore_errors:true` to the playbook task so the task error + will not fail the playbook. 
- If the error is resulting from a batch job, + add `ignore_errors:true` to the task and capture the output into a variable + and extract the job ID with a regular expression and then use `zos_job_output` + to display the DD without the non-printable character such as the DD `JESMSGLG`. + (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) + (https://github.com/ansible-collections/ibm_zos_core/issues/972) + - With later versions of `ansible-core` used with `ibm_zos_core` collection + a warning has started to appear "Module "ansible.builtin.command" returned + non UTF-8 data in the JSON response" that is currently being reviewed. There + are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + minor_changes: + - module_utils/template - Add validation into path joins to detect unauthorized + path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_archive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_archive - Enhanced test cases to use test lines the same length of the + record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + - zos_copy - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_copy - Add new option `force_lock` that can copy into data sets that are + already in use by other processes (DISP=SHR). User needs to use with caution + because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). + - zos_copy - includes a new option `executable` that enables copying of executables + such as load modules or program objects to both USS and partitioned data sets. 
+ When the `dest` option contains a non-existent data set, `zos_copy` will create + a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) + - zos_copy - introduces a new option 'aliases' to enable preservation of member + aliases when copying data to partitioned data sets (PDS) destinations from + USS or other PDS sources. Copying aliases of text based members to/from USS + is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) + - zos_fetch - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) + - zos_operator - Changed system to call 'wait=true' parameter to zoau call. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) + - zos_operator_action_query - Add a max delay of 5 seconds on each part of the + operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) + - zos_unarchive - Add validation into path joins to detect unauthorized path + traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive - Enhanced test cases to use test lines the same length of the + record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + release_summary: 'Release Date: ''2023-10-24'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1014-zos-copy-add-data-set-member-aliases.yml + - 1018-internal-consolidate-version-checks.yml + - 1029-validate-path-join.yml + - 1034-document-utf8-known-issue.yml + - 804-improved_load_module_and_program_object_support.yml + - 904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml + - 916-zos-lineinfile-does-not-behave-community.yml + - 918-zos-operator-response-come-back-truncate.yaml + - 920-zos-copy-add-library-choice.yml + - 934-Remove-conditional-unnecessary.yml + - 943-enhance-Add-wait-zos-operator-and-query.yml + - 951-Change-copy-for-zos-copy-and-remove-temporary-files.yml + - 952-zos-job-submit-truncate-final-character.yml + - 959-ac-tool-update-mounts.yml + - 963-validate-path-join.yml + - 964-modify-get_data_set_attributes-function.yml + - 965-enhance-archive-tests.yml + - 966-ac-tool-add-python-311-3.yml + - 969-Simplify_loadlib_test_cases.yml + - 980-zos-copy-disp-shr.yml + - v1.8.0-beta.1.yml + modules: + - description: Run scripts in z/OS + name: zos_script + namespace: '' + release_date: '2023-10-24' diff --git a/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml b/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml deleted file mode 100644 index 4122ea878..000000000 --- a/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: -- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases - when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. - Copying aliases of text based members to/from USS is not supported. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1014) \ No newline at end of file diff --git a/changelogs/fragments/1018-internal-consolidate-version-checks.yml b/changelogs/fragments/1018-internal-consolidate-version-checks.yml deleted file mode 100644 index 3698ed510..000000000 --- a/changelogs/fragments/1018-internal-consolidate-version-checks.yml +++ /dev/null @@ -1,9 +0,0 @@ -trivial: -- zoau_version_check - Change shell call to include call, for higher responsivity. - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) -- zos_operator - Use new version check - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) -- zos_operator_action_query - Use new version check - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) -- utils/job.py - Use new version check - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) diff --git a/changelogs/fragments/1028-asa-control-chars.yml b/changelogs/fragments/1028-asa-control-chars.yml deleted file mode 100644 index 6afc35e50..000000000 --- a/changelogs/fragments/1028-asa-control-chars.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_copy: add support in zos_copy for text files and data sets containing ASA - control characters. - (https://github.com/ansible-collections/ibm_zos_core/pull/1028) \ No newline at end of file diff --git a/changelogs/fragments/1029-validate-path-join.yml b/changelogs/fragments/1029-validate-path-join.yml deleted file mode 100644 index 785c1a41b..000000000 --- a/changelogs/fragments/1029-validate-path-join.yml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: - - zos_archive - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - module_utils/template - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) \ No newline at end of file diff --git a/changelogs/fragments/1034-document-utf8-known-issue.yml b/changelogs/fragments/1034-document-utf8-known-issue.yml deleted file mode 100644 index 860fd3f6c..000000000 --- a/changelogs/fragments/1034-document-utf8-known-issue.yml +++ /dev/null @@ -1,23 +0,0 @@ -known_issues: - - Several modules have reported UTF8 decoding errors when interacting with results - that contain non-printable UTF8 characters in the response. This occurs when - a module receives content that does not correspond to a UTF-8 value. - These include modules `zos_job_submit`, `zos_job_output`, - `zos_operator_action_query` but are not limited to this list. - This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. - Each case is unique, some options to work around the error are below. - - Specify that the ASA assembler option be enabled to instruct the assembler - to use ANSI control characters instead of machine code control characters. - - Add `ignore_errors:true` to the playbook task so the task error will not - fail the playbook. - - If the error is resulting from a batch job, add `ignore_errors:true` to the - task and capture the output into a variable and extract the job ID with a - regular expression and then use `zos_job_output` to display the DD without - the non-printable character such as the DD `JESMSGLG`. 
- (https://github.com/ansible-collections/ibm_zos_core/issues/677) - (https://github.com/ansible-collections/ibm_zos_core/issues/776) - (https://github.com/ansible-collections/ibm_zos_core/issues/972) - - With later versions of `ansible-core` used with `ibm_zos_core` collection a - warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" - that is currently being reviewed. There are no recommendations at this point. - (https://github.com/ansible-collections/ibm_zos_core/issues/983) diff --git a/changelogs/fragments/804-improved_load_module_and_program_object_support.yml b/changelogs/fragments/804-improved_load_module_and_program_object_support.yml deleted file mode 100644 index 07379c1e3..000000000 --- a/changelogs/fragments/804-improved_load_module_and_program_object_support.yml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- zos_copy - includes a new option `executable` that enables copying of executables such - as load modules or program objects to both USS and partitioned data sets. When - the `dest` option contains a non-existent data set, `zos_copy` will create a data set with - the appropriate attributes for an executable. - (https://github.com/ansible-collections/ibm_zos_core/pull/804) \ No newline at end of file diff --git a/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml b/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml deleted file mode 100644 index 9218a0ed3..000000000 --- a/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml +++ /dev/null @@ -1,2 +0,0 @@ -deprecated_features: - - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). 
\ No newline at end of file diff --git a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml deleted file mode 100644 index 9b13df055..000000000 --- a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. - Fix now prevents a duplicate entry if the line already exists in the target file. - (https://github.com/ansible-collections/ibm_zos_core/pull/916) \ No newline at end of file diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml deleted file mode 100644 index 1e2d3c10f..000000000 --- a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_operator - The last line of the operator was missing in the response of - the module. The fix now ensures the presence of the full output of the operator. - https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/changelogs/fragments/920-zos-copy-add-library-choice.yml b/changelogs/fragments/920-zos-copy-add-library-choice.yml deleted file mode 100644 index 2d339227b..000000000 --- a/changelogs/fragments/920-zos-copy-add-library-choice.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. - Documentation updated to reflect this change. - (https://github.com/ansible-collections/ibm_zos_core/pull/968). 
\ No newline at end of file diff --git a/changelogs/fragments/934-Remove-conditional-unnecessary.yml b/changelogs/fragments/934-Remove-conditional-unnecessary.yml deleted file mode 100644 index 3ceeffa99..000000000 --- a/changelogs/fragments/934-Remove-conditional-unnecessary.yml +++ /dev/null @@ -1,2 +0,0 @@ -trivial: - - zos_blockinfile - remove test conditional unnecessary (https://github.com/ansible-collections/ibm_zos_core/pull/934). \ No newline at end of file diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml deleted file mode 100644 index 91f920145..000000000 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ /dev/null @@ -1,8 +0,0 @@ -minor_changes: - - zos_operator - Changed system to call 'wait=true' parameter to zoau call. - Requires zoau 1.2.5 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. - Requires zoau 1.2.5 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/976) - diff --git a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml deleted file mode 100644 index 99a0599ec..000000000 --- a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml +++ /dev/null @@ -1,7 +0,0 @@ -bugfixes: - - zos_job_submit - Temporary files were created in tmp directory. - Fix now ensures the deletion of files every time the module run. - (https://github.com/ansible-collections/ibm_zos_core/pull/951) -minor_changes: - - zos_job_submit - Change action plugin call from copy to zos_copy. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/951) \ No newline at end of file diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml deleted file mode 100644 index 7a4ce88cb..000000000 --- a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_job_submit - The last line of the jcl was missing in the input. - Fix now ensures the presence of the full input in job_submit. - (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file diff --git a/changelogs/fragments/959-ac-tool-update-mounts.yml b/changelogs/fragments/959-ac-tool-update-mounts.yml deleted file mode 100644 index 4eb90122d..000000000 --- a/changelogs/fragments/959-ac-tool-update-mounts.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Add ZOAU 1.2.4 and 1.2.5 mounts. - (https://github.com/ansible-collections/ibm_zos_core/pull/959) \ No newline at end of file diff --git a/changelogs/fragments/963-validate-path-join.yml b/changelogs/fragments/963-validate-path-join.yml deleted file mode 100644 index 129af357e..000000000 --- a/changelogs/fragments/963-validate-path-join.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_fetch - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/962) - - zos_copy - Add validation into path joins to detect unauthorized path traversals. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/962) \ No newline at end of file diff --git a/changelogs/fragments/964-modify-get_data_set_attributes-function.yml b/changelogs/fragments/964-modify-get_data_set_attributes-function.yml deleted file mode 100644 index da384c77b..000000000 --- a/changelogs/fragments/964-modify-get_data_set_attributes-function.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- zos_copy - modify get_data_set_attributes helper function to no longer overwrite caller-defined attributes. - (https://github.com/ansible-collections/ibm_zos_core/pull/964) \ No newline at end of file diff --git a/changelogs/fragments/965-enhance-archive-tests.yml b/changelogs/fragments/965-enhance-archive-tests.yml deleted file mode 100644 index 80705e4c1..000000000 --- a/changelogs/fragments/965-enhance-archive-tests.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_archive - Enhanced test cases to use test lines the same length of the record length. - (https://github.com/ansible-collections/ibm_zos_core/pull/965) - - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. - (https://github.com/ansible-collections/ibm_zos_core/pull/965) \ No newline at end of file diff --git a/changelogs/fragments/966-ac-tool-add-python-311-3.yml b/changelogs/fragments/966-ac-tool-add-python-311-3.yml deleted file mode 100644 index 231d3e2be..000000000 --- a/changelogs/fragments/966-ac-tool-add-python-311-3.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Add python 3.11-3 mount table. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/966) \ No newline at end of file diff --git a/changelogs/fragments/969-Simplify_loadlib_test_cases.yml b/changelogs/fragments/969-Simplify_loadlib_test_cases.yml deleted file mode 100644 index ce2060ed8..000000000 --- a/changelogs/fragments/969-Simplify_loadlib_test_cases.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- zos_copy - Divide large test case for loadlibs and simplify functions. - (https://github.com/ansible-collections/ibm_zos_core/pull/969) \ No newline at end of file diff --git a/changelogs/fragments/980-zos-copy-disp-shr.yml b/changelogs/fragments/980-zos-copy-disp-shr.yml deleted file mode 100644 index 541e611c1..000000000 --- a/changelogs/fragments/980-zos-copy-disp-shr.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: -- zos_copy - Add new option `force_lock` that can copy into data sets that are - already in use by other processes (DISP=SHR). User needs to use with caution - because this is subject to race conditions and can lead to data loss. - (https://github.com/ansible-collections/ibm_zos_core/pull/980). diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index e19332bf4..004671ebc 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -158,7 +158,7 @@ force force_lock - By default, when c(dest) is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. + By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. @@ -882,7 +882,7 @@ destination_attributes checksum SHA256 checksum of the file after running zos_copy. 
- | **returned**: C(validate) is C(true) and if dest is USS + | **returned**: When ``validate=true`` and if ``dest`` is USS | **type**: str | **sample**: 8d320d5f68b048fc97559d771ede68b37a71e8374d1d678d96dcfa2b2da7a64e @@ -945,7 +945,7 @@ state note A note to the user after module terminates. - | **returned**: C(force) is C(false) and dest exists + | **returned**: When ``force=true`` and ``dest`` exists | **type**: str | **sample**: No data was copied diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index de1a27013..10150952d 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,66 @@ Releases ======== +Version 1.8.0-beta.1 +==================== + +New Modules +----------- + +- ``zos_script`` - Run scripts in z/OS + +Minor Changes +------------- +- ``zos_archive`` + + - Add validation into path joins to detect unauthorized path traversals. + - Enhanced test cases to use test lines the same length of the record length. +- ``zos_copy`` + + - Add validation into path joins to detect unauthorized path traversals. + - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. + - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. + - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. + - add support in zos_copy for text files and data sets containing ASA control characters. 
+- ``zos_fetch`` - Add validation into path joins to detect unauthorized path traversals. +- ``zos_job_submit`` - Change action plugin call from copy to zos_copy. +- ``zos_operator`` - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. +- ``zos_operator_action_query`` - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. +- ``zos_unarchive`` + + - Add validation into path joins to detect unauthorized path traversals. + - Enhanced test cases to use test lines the same length of the record length. +- ``module_utils/template`` - Add validation into path joins to detect unauthorized path traversals. + +Bugfixes +-------- + +- ``zos_copy`` - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. +- ``zos_job_submit`` - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. +- ``zos_job_submit`` - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. +- ``zos_lineinfile`` - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. +- ``zos_operator`` - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. + +Known Issues +------------ + +- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. 
- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. +- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS®`_ V2R4 or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3. + Version 1.7.0 ============= @@ -828,6 +888,8 @@ Known issues https://www.ibm.com/docs/en/zoau/1.2.x .. _Z Open Automation Utilities 1.2.3: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.2.4: + https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. 
_z/OS®: diff --git a/galaxy.yml b/galaxy.yml index f5c0ccf46..b83b1014a 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.7.0 +version: 1.8.0-beta.1 # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 51e4c7392..7a68a05bb 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.7.0" +version: "1.8.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index db3fb1fd7..630ce7969 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -90,7 +90,7 @@ def run(self, tmp=None, task_vars=None): source_full = None try: source_full = self._loader.get_real_file(source) - source_rel = os.path.basename(source) + # source_rel = os.path.basename(source) except AnsibleFileNotFound as e: result["failed"] = True result["msg"] = "could not find src=%s, %s" % (source_full, e) diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 21d2b5a7e..ec4955ac6 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9bafdc471..ec48910e0 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -172,7 +172,7 @@ required: false force_lock: description: - - By default, when c(dest) is a MVS data set and is being used by another + - By default, when C(dest) is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use C(force_lock) to bypass this check and continue with copy. - If set to C(true) and destination is a MVS data set opened by another @@ -715,7 +715,7 @@ } checksum: description: SHA256 checksum of the file after running zos_copy. - returned: C(validate) is C(true) and if dest is USS + returned: When ``validate=true`` and if ``dest`` is USS type: str sample: 8d320d5f68b048fc97559d771ede68b37a71e8374d1d678d96dcfa2b2da7a64e backup_name: @@ -760,7 +760,7 @@ sample: file note: description: A note to the user after module terminates. - returned: C(force) is C(false) and dest exists + returned: When ``force=true`` and ``dest`` exists type: str sample: No data was copied msg: diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ddf895eb9..a06535763 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index fb1a47179..345927fe5 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -233,15 +233,22 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): def test_find_data_sets_in_volume(ansible_zos_module): - hosts = ansible_zos_module + try: + hosts = ansible_zos_module + data_set_name = "TEST.FIND.SEQ" + volume = "000000" + # Create temp data set + hosts.all.zos_data_set(name=data_set_name, type="seq", state="present", volumes=[volume]) + find_res = hosts.all.zos_find( + patterns=[data_set_name], volumes=[volume] + ) + print(vars(find_res)) + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) >= 1 + assert val.get('matched') >= 1 + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent") - find_res = hosts.all.zos_find( - patterns=['USER.*'], volumes=['IMSSUN'] - ) - print(vars(find_res)) - for val in find_res.contacted.values(): - assert len(val.get('data_sets')) >= 1 - assert val.get('matched') >= 1 def test_find_vsam_pattern(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 4b3990ab5..11b7cd90d 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 30f5175e4..c7afab2f9 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 4ad07d882..5aebe2a9c 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at From dc5cdf686d605f5ed7c4b64c6e7f43811fd05595 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 6 Nov 2023 11:38:56 -0600 Subject: [PATCH 213/413] Fix sending a local archive into remote fails (#1045) * Added test * Added test for local archive and then remote unarchive * Added changelog * Removed format fixture * Ensure tempfile cleanup --- .../fragments/1045-local-uss-unarchive.yml | 5 ++ plugins/action/zos_unarchive.py | 4 +- .../modules/test_zos_unarchive_func.py | 46 ++++++++++++++++++- 3 files changed, 52 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1045-local-uss-unarchive.yml diff --git a/changelogs/fragments/1045-local-uss-unarchive.yml b/changelogs/fragments/1045-local-uss-unarchive.yml new file mode 100644 index 000000000..84bc5508c --- /dev/null +++ b/changelogs/fragments/1045-local-uss-unarchive.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_unarchive - Using a local file with a USS format option failed when sending to + remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set + as None when using a USS format option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1045). 
\ No newline at end of file diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 19cbf5ead..d808647ef 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -61,8 +61,6 @@ def run(self, tmp=None, task_vars=None): format_name = format.get("name") copy_module_args = dict() dest_data_set = format.get("dest_data_set") - if dest_data_set is None: - dest_data_set = dict() dest = "" if source.startswith('~'): source = os.path.expanduser(source) @@ -73,6 +71,8 @@ def run(self, tmp=None, task_vars=None): module_name="tempfile", module_args={}, task_vars=task_vars, ).get("path") elif format_name in MVS_SUPPORTED_FORMATS: + if dest_data_set is None: + dest_data_set = dict() tmp_hlq = module_args.get("tmp_hlq") if module_args.get("tmp_hlq") is not None else "" cmd_res = self._execute_module( module_name="command", diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 46a1e8534..2faba0023 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -87,6 +87,7 @@ def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): - test_uss_unarchive_include - test_uss_unarchive_exclude - test_uss_unarchive_list +- test_uss_unarchive_copy_to_remote """ @@ -248,7 +249,7 @@ def test_uss_unarchive_list(ansible_zos_module, format): @pytest.mark.uss @pytest.mark.parametrize("format", USS_FORMATS) -def test_uss_single_archive_with_mode(ansible_zos_module, format): +def test_uss_single_unarchive_with_mode(ansible_zos_module, format): try: hosts = ansible_zos_module hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") @@ -283,6 +284,49 @@ def test_uss_single_archive_with_mode(ansible_zos_module, format): finally: hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") +@pytest.mark.uss +def test_uss_unarchive_copy_to_remote(ansible_zos_module): + try: + import os + import 
tarfile + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + # create local tmp dir + tmp_dir = tempfile.TemporaryDirectory() + tmp_file = tempfile.NamedTemporaryFile(delete=False) + tar_file = tmp_dir.name + "/tmpfile.tar" + # create local file + with open(tmp_file.name, 'w') as f: + f.write("This is a sample text for the file") + # archive using different formats + with tarfile.open(tar_file, 'w') as tar: + tar.add(tmp_file.name) + + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=tar_file, + dest=USS_TEMP_DIR, + format=dict( + name="tar" + ), + force=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd="ls {0}/{1}".format(USS_TEMP_DIR, tmp_file.name)) + for c_result in cmd_result.contacted.values(): + for file in USS_TEST_FILES.keys(): + assert tmp_file.name in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + os.remove(tmp_file.name) ###################################################################### # From b9ace9028bff04a8ebfac48f3d342da6d43308f6 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 8 Nov 2023 11:55:31 -0600 Subject: [PATCH 214/413] Clean temporary data sets created during XMIT unarchive operation (#1049) * Added a temp cleanup * Added changelog * Modified changelog * Added removal of src if remote_src is False Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Modified changelog fragments --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- changelogs/fragments/1049-xmit-temporary-data-sets.yml | 4 ++++ 
plugins/modules/zos_unarchive.py | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1049-xmit-temporary-data-sets.yml diff --git a/changelogs/fragments/1049-xmit-temporary-data-sets.yml b/changelogs/fragments/1049-xmit-temporary-data-sets.yml new file mode 100644 index 000000000..5ef0f2078 --- /dev/null +++ b/changelogs/fragments/1049-xmit-temporary-data-sets.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the + temporary data sets created. Fix now removes the temporary data sets. + (https://github.com/ansible-collections/ibm_zos_core/pull/1049). \ No newline at end of file diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index be7c93f5c..81737ed29 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -853,7 +853,8 @@ def unpack(self, src, dest): dds = {'args': 'UNPACK', 'sysut1': src, 'sysut2': dest} rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds) if rc != 0: - self.clean_environment(data_sets=[dest], uss_files=[], remove_targets=True) + ds_remove_list = [dest, src] if not self.remote_src else [dest] + self.clean_environment(data_sets=ds_remove_list, uss_files=[], remove_targets=True) self.module.fail_json( msg="Failed executing AMATERSE to restore {0} into {1}".format(src, dest), stdout=out, @@ -881,6 +882,8 @@ def unpack(self, src, dest): """.format(src, dest) rc, out, err = mvs_cmd.ikjeft01(cmd=unpack_cmd, authorized=True) if rc != 0: + ds_remove_list = [dest, src] if not self.remote_src else [dest] + self.clean_environment(data_sets=ds_remove_list, uss_files=[], remove_targets=True) self.module.fail_json( msg="Failed executing RECEIVE to restore {0} into {1}".format(src, dest), stdout=out, From 437c0dadff44a7b90b1fe0c808a590c9e88c720a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= 
<68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 9 Nov 2023 10:56:16 -0600 Subject: [PATCH 215/413] Enabler/1002/test_collections_on_ansible core 2_16 (#1053) * Add sanity ignore * Move dependencyfinder * Revert "Move dependencyfinder" This reverts commit 2bbbc5adffe94b32dd6d1af12f7c7cace93cca94. * Modify dependecyfinder * Modify dependecyfinder * Modify dependecyfinder * Add fragment * Modify fragment --- ...053-Enabler_1002_test_collections_on_ansible_core_2_16.yml | 4 ++++ tests/sanity/ignore-2.16.txt | 2 ++ 2 files changed, 6 insertions(+) create mode 100644 changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml diff --git a/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml b/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml new file mode 100644 index 000000000..ac3c24bb5 --- /dev/null +++ b/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml @@ -0,0 +1,4 @@ +trivial: + - zos_archive - add missing-gplv3-license ignore to ignore 2.16. + - zos_unarchive - add missing-gplv3-license ignore to ignore 2.16. + (https://github.com/ansible-collections/ibm_zos_core/pull/1053). 
diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index a4835475f..70d4764e1 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -34,3 +34,5 @@ plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ed65e8edd467797c051e9e11bd2268a7f78c6af4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 9 Nov 2023 14:03:47 -0600 Subject: [PATCH 216/413] [Enabler] [zos_mvs_raw] Remove Try, Except, Pass from code (#1051) * Added action inside exception to avoid pass * Added action inside exception to avoid pass * Added changelog --- changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml | 4 ++++ plugins/modules/zos_mvs_raw.py | 4 +++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml diff --git a/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml b/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml new file mode 100644 index 000000000..59b33d02c --- /dev/null +++ b/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml @@ -0,0 +1,4 @@ +trivial: + - zos_mvs_raw - Removed Try, Except, Pass from the code, try block is in place to ignore any errors, + pass statement was changed to a variable assignment. This does not change any behavior. + (https://github.com/ansible-collections/ibm_zos_core/pull/1051). 
\ No newline at end of file diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index fa6f71908..55937ea63 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -2766,7 +2766,9 @@ def data_set_exists(name, volumes=None): present, changed = DataSet.attempt_catalog_if_necessary(name, volumes) exists = present except Exception: - pass + # Failure locating or cataloging the data set. Go ahead assumming it does not exist. + # exists = False to avoid using pass clause which results in bandit warning. + exists = False return exists From c2ec92282d2af52f175df9efc157850df999eeb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:04:30 -0600 Subject: [PATCH 217/413] Update sanity test ignore (#1048) * Fixed Sanity ignore #6 and reduce use of Sanity issues 8-9 * Remove unused import * Remove ignore * Add fragment * Modify fragment * Change fragment * Update 1048-Update_sanity_tests_ignore.yml * Change ignore 2_dot_14 --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1048-Update_sanity_tests_ignore.yml | 8 ++++++++ plugins/action/zos_copy.py | 5 +---- plugins/modules/zos_copy.py | 14 +++++++++----- tests/sanity/ignore-2.14.txt | 2 -- tests/sanity/ignore-2.15.txt | 2 -- 5 files changed, 18 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1048-Update_sanity_tests_ignore.yml diff --git a/changelogs/fragments/1048-Update_sanity_tests_ignore.yml b/changelogs/fragments/1048-Update_sanity_tests_ignore.yml new file mode 100644 index 000000000..5d2960d28 --- /dev/null +++ b/changelogs/fragments/1048-Update_sanity_tests_ignore.yml @@ -0,0 +1,8 @@ +trivial: + - zos_copy - change data type of parameter src from path to str inside AnsibleModule util. + - zos_copy - deprecate add_file_common_args argument. 
+ - zos_copy - add owner and group to parameters inside AnsibleModule util. + - zos_copy - remove copy_member of AnsibleModule util as parameter and add to code logic. + - zos_copy - remove doc-default-does-not-match-spec ignore to ignore 2.14. + - zos_copy - remove doc-type-does-not-match-spec ignore to ignore 2.14. + (https://github.com/ansible-collections/ibm_zos_core/pull/1048). diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index d7d00eb64..592126b00 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -70,7 +70,7 @@ def run(self, tmp=None, task_vars=None): group = task_args.get("group", None) is_pds = is_src_dir = False - temp_path = is_uss = is_mvs_dest = copy_member = src_member = None + temp_path = is_uss = is_mvs_dest = src_member = None if dest: if not isinstance(dest, string_types): @@ -104,8 +104,6 @@ def run(self, tmp=None, task_vars=None): is_src_dir = os.path.isdir(src) is_pds = is_src_dir and is_mvs_dest - copy_member = is_member(dest) - if not src and not content: msg = "'src' or 'content' is required" return self._exit_action(result, msg, failed=True) @@ -249,7 +247,6 @@ def run(self, tmp=None, task_vars=None): is_uss=is_uss, is_pds=is_pds, is_src_dir=is_src_dir, - copy_member=copy_member, src_member=src_member, temp_path=temp_path, is_mvs_dest=is_mvs_dest, diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ec48910e0..d6559e793 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -114,7 +114,7 @@ be deleted and recreated following the process outlined in the C(volume) option. - When the C(dest) is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined - in the C(volume) option. + in the C(volume) option. - When C(dest) is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the C(volume) option. 
@@ -813,6 +813,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + is_member +) from ansible.module_utils._text import to_bytes, to_native from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import PY3 @@ -2566,7 +2569,6 @@ def run_module(module, arg_def): is_mvs_dest = module.params.get('is_mvs_dest') temp_path = module.params.get('temp_path') src_member = module.params.get('src_member') - copy_member = module.params.get('copy_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') force_lock = module.params.get('force_lock') @@ -2576,6 +2578,8 @@ def run_module(module, arg_def): if volume: dest_data_set["volumes"] = [volume] + copy_member = is_member(dest) + # ******************************************************************** # When copying to and from a data set member, 'dest' or 'src' will be # in the form DATA.SET.NAME(MEMBER). 
When this is the case, extract the @@ -3053,7 +3057,7 @@ def run_module(module, arg_def): def main(): module = AnsibleModule( argument_spec=dict( - src=dict(type='path'), + src=dict(type='str'), dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), @@ -3145,15 +3149,15 @@ def main(): is_mvs_dest=dict(type='bool'), size=dict(type='int'), temp_path=dict(type='str'), - copy_member=dict(type='bool'), src_member=dict(type='bool'), local_charset=dict(type='str'), force=dict(type='bool', default=False), force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), + owner=dict(type='str', required=False), + group=dict(type='str', required=False), tmp_hlq=dict(type='str', required=False, default=None), ), - add_file_common_args=True, ) arg_def = dict( diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 70d4764e1..415196660 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. 
-plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 70d4764e1..415196660 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. 
-plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin From 8148d54b7c15d1e0fe2c7a565ba61ac7c2217e48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:53:55 -0600 Subject: [PATCH 218/413] Update_sanity_ignore 2_16 (#1056) * Update sanity ignore 2_16 * Add fragment * Change fragment --- changelogs/fragments/1056-Update_sanity_ignore_2_16.yml | 4 ++++ tests/sanity/ignore-2.16.txt | 2 -- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1056-Update_sanity_ignore_2_16.yml diff --git a/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml b/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml new file mode 100644 index 000000000..a5b192519 --- /dev/null +++ b/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml @@ -0,0 +1,4 @@ +trivial: + - zos_copy - remove doc-default-does-not-match-spec 2.16 ignore file. + - zos_copy - remove doc-type-does-not-match-spec 2.16 ignore file. + (https://github.com/ansible-collections/ibm_zos_core/pull/1056). diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 70d4764e1..415196660 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. 
plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin From 2aed55c21599e41cba69f6dba88d7a3d99b8194a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 9 Nov 2023 18:40:45 -0600 Subject: [PATCH 219/413] [Enabler] [module_utils/dd_statement.py] Changed try except pass to except specific DatasetDeleteError exception (#1052) * Changed try except pass to except specific class * Added changelog * Update 1052-try-except-pass-dd-statement.yml * Update dd_statement.py updated copyright year --- changelogs/fragments/1052-try-except-pass-dd-statement.yml | 4 ++++ plugins/module_utils/dd_statement.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1052-try-except-pass-dd-statement.yml diff --git a/changelogs/fragments/1052-try-except-pass-dd-statement.yml b/changelogs/fragments/1052-try-except-pass-dd-statement.yml new file mode 100644 index 000000000..42315337c --- /dev/null +++ b/changelogs/fragments/1052-try-except-pass-dd-statement.yml @@ -0,0 +1,4 @@ +trivial: + - zos_mvs_raw - Removed Try, Except, Pass from the code, instead catching DatasetDeleteError + and pass only in that case, any other exception will be raised. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1052). diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index ded94dbec..d35f9e44e 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -661,7 +661,7 @@ def __del__(self): """ try: DataSet.delete(self.name) - except Exception: + except DataSet.DatasetDeleteError: pass def _build_arg_string(self): From 5feec01d9514303e4bd1ac02b712eabe22e53ad2 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:14:18 -0600 Subject: [PATCH 220/413] Removed run command use of subprocess from encode defaults infavor of using AnsibleModule run command (#1055) * Removed run command use of subprocess from encode defaults infavor of using ansible module * Added changelog --- .../1055-remove-subprocess-encode.yml | 4 + plugins/module_utils/encode.py | 3 +- plugins/module_utils/system.py | 102 +++++++++--------- 3 files changed, 57 insertions(+), 52 deletions(-) create mode 100644 changelogs/fragments/1055-remove-subprocess-encode.yml diff --git a/changelogs/fragments/1055-remove-subprocess-encode.yml b/changelogs/fragments/1055-remove-subprocess-encode.yml new file mode 100644 index 000000000..7e458dc09 --- /dev/null +++ b/changelogs/fragments/1055-remove-subprocess-encode.yml @@ -0,0 +1,4 @@ +trivial: + - encode_utils - Removed use of subprocess from system utils, since the only + use of it could be replaced for AnsibleModule runcommand method. + (https://github.com/ansible-collections/ibm_zos_core/pull/1055). 
\ No newline at end of file diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 047aa654c..c36d0b272 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -62,7 +62,8 @@ def get_default_system_charset(): """ system_charset = locale.getdefaultlocale()[1] if system_charset is None: - rc, out, err = system.run_command("locale -c charmap") + module = AnsibleModuleHelper(argument_spec={}) + rc, out, err = module.run_command("locale -c charmap") if rc != 0 or not out or err: if system.is_zos(): system_charset = Defaults.DEFAULT_EBCDIC_USS_CHARSET diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 5be6d1944..5a452a48a 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -15,7 +15,7 @@ from platform import platform from os import name as OS_NAME from sys import platform as SYS_PLATFORM -from subprocess import Popen, PIPE +# from subprocess import Popen, PIPE from ansible.module_utils.six import binary_type, text_type, PY2, PY3 # from ansible.module_utils._text import to_text, to_bytes from ansible.module_utils.common.text.converters import to_bytes, to_text @@ -76,53 +76,53 @@ def is_zos(): return is_zos_unix and SYS_PLATFORM == "zos" -def run_command(args, stdin=None, **kwargs): - """ Execute a shell command on the current system. This function should only - be used when AnsibleModule.run_command() is not available. This function - essentially serves as a wrapper for Python subprocess.Popen and supports all - of the arguments supported by Popen. - - Required arguments: - args: args should be a sequence of program arguments or else a single - string or path-like object. By default, the program to execute is the - first item in args if args is a sequence. It is recommended to pass - args as a sequence. - - Refer to the following link for a more detailed description of this - parameter and other parameters. 
- https://docs.python.org/3/library/subprocess.html#subprocess.Popen - - Returns: - tuple[int, str, str]: The return code, stdout and stderr produced after - executing the command. - """ - rc = out = err = None - if not isinstance(args, (list, binary_type, text_type)): - rc = -1 - err = "'args' must be list or string" - return rc, out, err - - if isinstance(args, (text_type, str)): - if PY2: - args = to_bytes(args, errors='surrogate_or_strict') - elif PY3: - args = to_text(args, errors='surrogateescape') - args = split(args) - - kwargs.update( - dict( - stdin=PIPE if stdin else None, - stderr=PIPE, - stdout=PIPE - ) - ) - try: - cmd = Popen(args, **kwargs) - except TypeError as proc_err: - rc = -1 - err = str(proc_err) - return rc, out, err - - out, err = tuple(map(to_text, cmd.communicate())) - rc = cmd.returncode - return rc, out, err +# def run_command(args, stdin=None, **kwargs): +# """ Execute a shell command on the current system. This function should only +# be used when AnsibleModule.run_command() is not available. This function +# essentially serves as a wrapper for Python subprocess.Popen and supports all +# of the arguments supported by Popen. + +# Required arguments: +# args: args should be a sequence of program arguments or else a single +# string or path-like object. By default, the program to execute is the +# first item in args if args is a sequence. It is recommended to pass +# args as a sequence. + +# Refer to the following link for a more detailed description of this +# parameter and other parameters. +# https://docs.python.org/3/library/subprocess.html#subprocess.Popen + +# Returns: +# tuple[int, str, str]: The return code, stdout and stderr produced after +# executing the command. 
+# """ +# rc = out = err = None +# if not isinstance(args, (list, binary_type, text_type)): +# rc = -1 +# err = "'args' must be list or string" +# return rc, out, err + +# if isinstance(args, (text_type, str)): +# if PY2: +# args = to_bytes(args, errors='surrogate_or_strict') +# elif PY3: +# args = to_text(args, errors='surrogateescape') +# args = split(args) + +# kwargs.update( +# dict( +# stdin=PIPE if stdin else None, +# stderr=PIPE, +# stdout=PIPE +# ) +# ) +# try: +# cmd = Popen(args, **kwargs) +# except TypeError as proc_err: +# rc = -1 +# err = str(proc_err) +# return rc, out, err + +# out, err = tuple(map(to_text, cmd.communicate())) +# rc = cmd.returncode +# return rc, out, err From 6cd4f7c12533d70df3eb2741ee829dab97bfb391 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:16:10 -0600 Subject: [PATCH 221/413] [1.9.0] zos_apf remove try expect pass to better exception handling (#1036) * Removed except pass * Added empty strings * Added changelog * Corrected changelog * Modified if statement to honor current behavior * Update 1036-apf-try-except.yml * Update 1036-apf-try-except.yml --- changelogs/fragments/1036-apf-try-except.yml | 4 ++ plugins/modules/zos_apf.py | 40 ++++++++++---------- 2 files changed, 23 insertions(+), 21 deletions(-) create mode 100644 changelogs/fragments/1036-apf-try-except.yml diff --git a/changelogs/fragments/1036-apf-try-except.yml b/changelogs/fragments/1036-apf-try-except.yml new file mode 100644 index 000000000..16e8ab6c7 --- /dev/null +++ b/changelogs/fragments/1036-apf-try-except.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_apf - Improves exception handling if there is a failure + parsing the command response when operation selected is list. + (https://github.com/ansible-collections/ibm_zos_core/pull/1036). 
diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index d0fec1ff5..dee6094fc 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -520,29 +520,27 @@ def main(): operRc = ret.rc result['stderr'] = operErr result['rc'] = operRc + result['stdout'] = operOut if operation == 'list': - try: - dsRx = "" - volRx = "" - if library: - dsRx = re.compile(library) - if volume: - volRx = re.compile(volume) - if sms: - sms = "*SMS*" - if dsRx or volRx or sms: + if not library: + library = "" + if not volume: + volume = "" + if sms: + sms = "*SMS*" + if library or volume or sms: + try: data = json.loads(operOut) - operOut = "" - for d in data[2:]: - ds = d.get('ds') - vol = d.get('vol') - if (dsRx and dsRx.match(ds)) or (volRx and volRx.match(vol)) or (sms and sms == vol): - operOut = operOut + "{0} {1}\n".format(vol, ds) - except Exception: - pass - - result['stdout'] = operOut - + except json.JSONDecodeError: + module.exit_json(**result) + for d in data[2:]: + ds = d.get('ds') + vol = d.get('vol') + try: + if (library and re.match(library, ds)) or (volume and re.match(volume, vol)) or (sms and sms == vol): + result['stdout'] = "{0} {1}\n".format(vol, ds) + except re.error: + module.exit_json(**result) module.exit_json(**result) From 5e744c29e4b71f960e5ffc5c99115635f1996ad2 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:34:09 -0600 Subject: [PATCH 222/413] [v1.9.0] Replace randint to fix "Standard pseudo-random generators ... 
" bandit warning (#1016) * Changed random member name generation to choices * Modified choices in module_utils/data_set and blockinfile test * Added changelog fragment * Remove randint import --- changelogs/fragments/1016-remove-randint.yml | 5 +++++ plugins/module_utils/data_set.py | 9 +++++---- tests/functional/modules/test_zos_blockinfile_func.py | 4 ++-- 3 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1016-remove-randint.yml diff --git a/changelogs/fragments/1016-remove-randint.yml b/changelogs/fragments/1016-remove-randint.yml new file mode 100644 index 000000000..baac7fff9 --- /dev/null +++ b/changelogs/fragments/1016-remove-randint.yml @@ -0,0 +1,5 @@ +trivial: + - module_utils/data_set - Replace the use of random.randint to random.sample + to generate random member names, random.randint raised a warning while + scanning with bandit. + (https://github.com/ansible-collections/ibm_zos_core/pull/1016) \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index cbeb7eb7d..ab7a3c3c8 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -17,7 +17,7 @@ import tempfile from os import path, walk from string import ascii_uppercase, digits -from random import randint +from random import sample # from ansible.module_utils._text import to_bytes from ansible.module_utils.common.text.converters import to_bytes from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( @@ -1745,9 +1745,10 @@ def temp_member_name(): """Generate a temp member name""" first_char_set = ascii_uppercase + "#@$" rest_char_set = ascii_uppercase + digits + "#@$" - temp_name = first_char_set[randint(0, len(first_char_set) - 1)] - for i in range(7): - temp_name += rest_char_set[randint(0, len(rest_char_set) - 1)] + # using sample as k=1 and k=7 to avoid using random.choice just for oneline import + temp_name = sample(first_char_set, k=1) + 
temp_name += sample(rest_char_set, k=7) + temp_name = "".join(temp_name) return temp_name diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 226f34477..d768ad59d 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1205,6 +1205,7 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup_name): hosts = ansible_zos_module ds_type = dstype + backup_ds_name = "" params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) if backup_name: params["backup_name"] = backup_name @@ -1227,8 +1228,7 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup remove_ds_environment(ansible_zos_module, ds_name) if backup_name: ansible_zos_module.all.zos_data_set(name="BLOCKIF.TEST.BACKUP", state="absent") - ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") - else: + if backup_ds_name != "": ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") From 04439922134c6065e8e8e9430860995e41fee800 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:45:00 -0600 Subject: [PATCH 223/413] Removed unused imports --- plugins/module_utils/system.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 5a452a48a..54ec90dca 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -16,10 +16,10 @@ from os import name as OS_NAME from sys import platform as SYS_PLATFORM # from subprocess import Popen, PIPE -from ansible.module_utils.six import binary_type, text_type, PY2, PY3 +# from ansible.module_utils.six import binary_type, text_type, PY2, PY3 # from 
ansible.module_utils._text import to_text, to_bytes -from ansible.module_utils.common.text.converters import to_bytes, to_text -from shlex import split +# from ansible.module_utils.common.text.converters import to_bytes, to_text +# from shlex import split NIX_PLATFORMS = frozenset({ From b100cd928e89556e7585f5e4e25e50373c9d9258 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 15 Nov 2023 12:59:55 -0500 Subject: [PATCH 224/413] 1043 bug title zos operator is passing wrong value to zoauopercmd (#1044) * corrected kwarg index value from 'wait_arg' to 'wait' Also corrected true/false issue in zoaq * Added and updated changelog. * update PR number in changelog fragment * changed test from \$ to \\$ to eliminate warning * added blocking test to maks sure minimum wait is reached in zoau>1.2.4.5 * removed the else condition from the blocking test, since it is not needed. * corrected tense grammer in changelog fragment * corrected capitalization of ZOAU in changelog fragment. --- ...-is-passing-wrong-value-to-zoauopercmd.yml | 8 +++++ plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- .../modules/test_zos_operator_func.py | 31 +++++++++++++++---- 4 files changed, 35 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml diff --git a/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml b/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml new file mode 100644 index 000000000..06f9a264a --- /dev/null +++ b/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml @@ -0,0 +1,8 @@ +bugfixes: + - zos_operator - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. + (https://github.com/ansible-collections/ibm_zos_core/pull/1044). 
+ + - zos_operator_action_query - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. + (https://github.com/ansible-collections/ibm_zos_core/pull/1044). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 2d1fb807f..969890ba5 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -283,7 +283,7 @@ def run_operator_command(params): use_wait_arg = True if use_wait_arg: - kwargs.update({"wait_arg": True}) + kwargs.update({"wait": True}) args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index a06535763..ccf565626 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -272,7 +272,7 @@ def run_module(): use_wait_arg = True if use_wait_arg: - kwargs.update({"wait_arg": False}) + kwargs.update({"wait": True}) args = [] diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 5aebe2a9c..6891cffa8 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -23,6 +23,11 @@ import pytest from pprint import pprint +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) + + __metaclass__ = type @@ -103,13 +108,9 @@ def test_zos_operator_positive_verbose_with_full_delay(ansible_zos_module): def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): hosts = ansible_zos_module wait_time_s=10 - #startmod = time.time() results = hosts.all.zos_operator( cmd="d u,all", verbose=True, wait_time_s=wait_time_s ) - # endmod = time.time() - # timediff = endmod - startmod - # assert timediff < 15 for result in results.contacted.values(): assert result["rc"] == 
0 @@ -119,13 +120,31 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): assert result.get('elapsed') <= (2 * wait_time_s) +def test_zos_operator_positive_verbose_blocking(ansible_zos_module): + if zoau_version_checker.is_zoau_version_higher_than("1.2.4.5"): + hosts = ansible_zos_module + wait_time_s=5 + results = hosts.all.zos_operator( + cmd="d u,all", verbose=True, wait_time_s=wait_time_s + ) + + for result in results.contacted.values(): + assert result["rc"] == 0 + assert result.get("changed") is True + assert result.get("content") is not None + # Account for slower network + assert result.get('elapsed') >= wait_time_s + + + def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_operator(cmd="\$dspl") + results = hosts.all.zos_operator(cmd="\\$dspl") res = dict() res["stdout"] = [] for result in results.contacted.values(): stdout = result.get('content') # HASP646 Only appears in the last line that before did not appears last_line = len(stdout) - assert "HASP646" in stdout[last_line - 1] \ No newline at end of file + assert "HASP646" in stdout[last_line - 1] + From 3c1f0a4c5bfa7a57bc08874fde0a2557af27d092 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 15 Nov 2023 13:00:56 -0500 Subject: [PATCH 225/413] Bug 1041 zos submit job honor return output literally (#1058) * initial commit to pass return_output to job_output. * corrected fragment name to match branch * tweaked data set test to show result values if positive test fails * removed trace in zos_data_set, and added trace output to job_submit * removed extra text from functional testing. * put in correct PR number in changelog fragment. * changed trivial to minor_changes, added documentation to dd_scan in job:job_output. 
--- ...41-bug-zos-submit-job-honor-return-output-literally.yml | 4 ++++ plugins/module_utils/job.py | 7 ++++--- plugins/modules/zos_job_submit.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml diff --git a/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml b/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml new file mode 100644 index 000000000..726397d2d --- /dev/null +++ b/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_submit_job - Previous code did not return output, but still requested job data from the target system. + This changes to honor return_output=false by not querying the job dd segments at all. + (https://github.com/ansible-collections/ibm_zos_core/pull/1058). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index cfe8c4a67..bf23bf5bc 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -36,7 +36,7 @@ ) -def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, timeout=0, start_time=timer()): +def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. 
Keyword Arguments: @@ -44,6 +44,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, owner (str) -- The owner of the job (default: {None}) job_name (str) -- The job name search for (default: {None}) dd_name (str) -- The data definition to retrieve (default: {None}) + dd_scan (bool) - Whether or not to pull information from the dd's for this job {default: {True}} duration (int) -- The time the submitted job ran for timeout (int) - how long to wait in seconds for a job to complete start_time (int) - time the JCL started its submission @@ -70,7 +71,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, dd_name = parsed_args.get("dd_name") or "" job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, duration=duration, timeout=timeout, start_time=start_time) + dd_name=dd_name, duration=duration, dd_scan=dd_scan, timeout=timeout, start_time=start_time) # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): # current_time = timer() @@ -83,7 +84,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, owner = "" if owner == "*" else owner job_name = "" if job_name == "*" else job_name job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, duration=duration, timeout=timeout, start_time=start_time) + dd_name=dd_name, dd_scan=dd_scan, duration=duration, timeout=timeout, start_time=start_time) return job_detail diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index efdbd07d6..11f0f3ccb 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -959,7 +959,7 @@ def run_module(): job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, - duration=duration, timeout=wait_time_s, start_time=start_time) + dd_scan=return_output, duration=duration, timeout=wait_time_s, 
start_time=start_time) result["duration"] = duration From ec737c3beaa102ee3cccf2852c6812e74121d460 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Thu, 23 Nov 2023 09:36:57 -0700 Subject: [PATCH 226/413] Enabler/1024/remote_tmp for zos_script (#1060) * Changed tmp_path for Ansible's remote_tmp * Remove tmp_path from module's options * Update module documentation * Remove tmp_path test case * Update zos_script's RST file * Add changelog fragment * Updated module examples --- .../fragments/1060-remote_tmp_zos_script.yml | 5 +++ docs/source/modules/zos_script.rst | 20 +++-------- plugins/action/zos_script.py | 4 ++- plugins/modules/zos_script.py | 23 ++++-------- .../modules/test_zos_script_func.py | 35 ------------------- 5 files changed, 20 insertions(+), 67 deletions(-) create mode 100644 changelogs/fragments/1060-remote_tmp_zos_script.yml diff --git a/changelogs/fragments/1060-remote_tmp_zos_script.yml b/changelogs/fragments/1060-remote_tmp_zos_script.yml new file mode 100644 index 000000000..1185f3a1b --- /dev/null +++ b/changelogs/fragments/1060-remote_tmp_zos_script.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_script - add support for remote_tmp from the Ansible + configuration to setup where temporary files will be created, + replacing the module option tmp_path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1060). \ No newline at end of file diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 29d9bb2df..bc8dff3c0 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -102,19 +102,6 @@ removes | **type**: str -tmp_path - Directory path in the remote machine where local scripts will be temporarily copied to. - - When not specified, the module will copy local scripts to the default temporary path for the user. - - If ``tmp_path`` does not exist in the remote machine, the module will not create it. 
- - All scripts copied to ``tmp_path`` will be removed from the managed node before the module finishes executing. - - | **required**: False - | **type**: str - - use_template Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. @@ -264,11 +251,10 @@ Examples remote_src: true chdir: /u/user/output_dir - - name: Run a local Python script that uses a custom tmp_path. + - name: Run a local Python script in the temporary directory specified in the Ansible environment variable 'remote_tmp'. zos_script: cmd: ./scripts/program.py executable: /usr/bin/python3 - tmp_path: /usr/tmp/ibm_zos_core - name: Run a local script made from a template. zos_script: @@ -294,6 +280,10 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. + The location in the z/OS system where local scripts will be copied to can be configured through Ansible's ``remote_tmp`` option. Refer to `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`_ for more information. + + All local scripts copied to a remote z/OS system will be removed from the managed node before the module finishes executing. + Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for remote scripts will be restored by the module before the task ends. The module will only add execution permissions for the file owner. 
diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index a17934ac4..36345810b 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -56,7 +56,9 @@ def run(self, tmp=None, task_vars=None): if not remote_src: script_path = path.abspath(path.normpath(script_path)) script_name = path.basename(script_path) - tmp_path = module_args.get('tmp_path') + # Accessing the globally-defined temporary directory + # that Ansible expects to be used. + tmp_path = self._connection._shell._options.get("remote_tmp") # Getting a temporary path for the script. tempfile_args = dict( diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index 15699c4a1..b69d70b2d 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -92,18 +92,6 @@ script will not be executed. type: str required: false - tmp_path: - description: - - Directory path in the remote machine where local scripts will be - temporarily copied to. - - When not specified, the module will copy local scripts to - the default temporary path for the user. - - If C(tmp_path) does not exist in the remote machine, the - module will not create it. - - All scripts copied to C(tmp_path) will be removed from the managed - node before the module finishes executing. - type: str - required: false extends_documentation_fragment: - ibm.ibm_zos_core.template @@ -112,6 +100,12 @@ - When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. + - The location in the z/OS system where local scripts will be copied to can be + configured through Ansible's C(remote_tmp) option. Refer to + L(Ansible's documentation,https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp) + for more information. 
+ - All local scripts copied to a remote z/OS system will be removed from the + managed node before the module finishes executing. - Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for remote scripts will be restored by the module before the task ends. @@ -154,11 +148,10 @@ remote_src: true chdir: /u/user/output_dir -- name: Run a local Python script that uses a custom tmp_path. +- name: Run a local Python script in the temporary directory specified in the Ansible environment variable 'remote_tmp'. zos_script: cmd: ./scripts/program.py executable: /usr/bin/python3 - tmp_path: /usr/tmp/ibm_zos_core - name: Run a local script made from a template. zos_script: @@ -251,7 +244,6 @@ def run_module(): executable=dict(type='str', required=False), remote_src=dict(type='bool', required=False), removes=dict(type='str', required=False), - tmp_path=dict(type='str', required=False), use_template=dict(type='bool', default=False), template_parameters=dict( type='dict', @@ -287,7 +279,6 @@ def run_module(): executable=dict(arg_type='path', required=False), remote_src=dict(arg_type='bool', required=False), removes=dict(arg_type='path', required=False), - tmp_path=dict(arg_type='path', required=False), use_template=dict(arg_type='bool', required=False), template_parameters=dict( arg_type='dict', diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py index 2bdae2a66..8bc310fe5 100644 --- a/tests/functional/modules/test_zos_script_func.py +++ b/tests/functional/modules/test_zos_script_func.py @@ -237,41 +237,6 @@ def test_rexx_script_chdir(ansible_zos_module): hosts.all.file(path=tmp_remote_dir, state='absent') -def test_rexx_script_tmp_path(ansible_zos_module): - import os - - hosts = ansible_zos_module - - try: - rexx_script = create_script_content('tmp_path test', 'rexx') - script_path = create_local_file(rexx_script, 'rexx') - - tmp_remote_dir = 
'/tmp/zos_script_tests' - file_result = hosts.all.file( - path=tmp_remote_dir, - state='directory' - ) - - for result in file_result.contacted.values(): - assert result.get('changed') is True - - zos_script_result = hosts.all.zos_script( - cmd=script_path, - tmp_path=tmp_remote_dir - ) - - for result in zos_script_result.contacted.values(): - assert result.get('changed') is True - assert result.get('failed', False) is False - assert result.get('rc') == 0 - assert result.get('stderr', '') == '' - assert tmp_remote_dir in result.get('remote_cmd', '') - finally: - if os.path.exists(script_path): - os.remove(script_path) - hosts.all.file(path=tmp_remote_dir, state='absent') - - def test_python_script(ansible_zos_module): import os From b17dad3ffed08f5653ea8cc1eabbdedd911460a3 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 23 Nov 2023 11:13:19 -0600 Subject: [PATCH 227/413] [zos_copy] Files corrupted after second copy (#1064) * Initial change to replace shutil.copy * Added fix for corrupted directory copies * Added changelog fragment * Modified docstring and fixed copy_tree * Added punctiation * Added copystat * Added set mode for dirs * Update 1064-corruped-second-copy.yml --- .../fragments/1064-corruped-second-copy.yml | 5 ++ plugins/modules/zos_copy.py | 70 ++++++++++++++++++- 2 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1064-corruped-second-copy.yml diff --git a/changelogs/fragments/1064-corruped-second-copy.yml b/changelogs/fragments/1064-corruped-second-copy.yml new file mode 100644 index 000000000..82a04426e --- /dev/null +++ b/changelogs/fragments/1064-corruped-second-copy.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1064). diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index d6559e793..dbed382f2 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -953,6 +953,64 @@ def copy_to_vsam(self, src, dest): cmd=repro_cmd, ) + def _copy_tree(self, entries, src, dest, dirs_exist_ok=False): + """Recursively copy USS directory to another USS directory. + This function was created to circumvent using shutil.copytree + as it presented the issue of corrupting file contents after second copy + because the use of shutil.copy2. This issue is only present in + Python 3.11 and 3.12. + + Arguments: + entries {list} -- List of files under src directory. + src_dir {str} -- USS source directory. + dest_dir {str} -- USS dest directory. + dirs_exist_ok {bool} -- Whether to copy files to an already existing directory. + + Raises: + Exception -- When copying into the directory fails. + + Returns: + {str } -- Destination directory that was copied. 
+ """ + os.makedirs(dest, exist_ok=dirs_exist_ok) + for src_entry in entries: + src_name = os.path.join(validation.validate_safe_path(src), validation.validate_safe_path(src_entry.name)) + dest_name = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_entry.name)) + try: + if src_entry.is_symlink(): + link_to = os.readlink(src_name) + os.symlink(link_to, dest_name) + shutil.copystat(src_name, dest_name, follow_symlinks=True) + elif src_entry.is_dir(): + self.copy_tree(src_name, dest_name, dirs_exist_ok=dirs_exist_ok) + else: + opts = dict() + opts["options"] = "" + response = datasets._copy(src_name, dest_name, None, **opts) + if response.rc > 0: + raise Exception(response.stderr_response) + shutil.copystat(src_name, dest_name, follow_symlinks=True) + except Exception as err: + raise err + + return dest + + def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): + """ + Copies a USS directory into another USS directory. + + Arguments: + src_dir {str} -- USS source directory. + dest_dir {str} -- USS dest directory. + dirs_exist_ok {bool} -- Whether to copy files to an already existing directory. + + Returns: + {str} -- Destination directory that was copied. 
+ """ + with os.scandir(src_dir) as itr: + entries = list(itr) + return self._copy_tree(entries, src_dir, dest_dir, dirs_exist_ok=dirs_exist_ok) + def convert_encoding(self, src, temp_path, encoding): """Convert encoding for given src @@ -1258,6 +1316,7 @@ def copy_to_uss( if not os.path.isdir(dest): self.module.set_mode_if_different(dest, mode, False) if changed_files: + self.module.set_mode_if_different(dest, mode, False) for filepath in changed_files: self.module.set_mode_if_different( os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(filepath)), mode, False @@ -1293,7 +1352,13 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): if self.is_binary: copy.copy_uss2uss_binary(new_src, dest) else: - shutil.copy(new_src, dest) + opts = dict() + opts["options"] = "" + response = datasets._copy(new_src, dest, None, **opts) + if response.rc > 0: + raise Exception(response.stderr_response) + shutil.copystat(new_src, dest, follow_symlinks=True) + # shutil.copy(new_src, dest) if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) @@ -1353,7 +1418,8 @@ def _copy_to_dir( try: if copy_directory: dest = os.path.join(validation.validate_safe_path(dest_dir), validation.validate_safe_path(os.path.basename(os.path.normpath(src_dir)))) - dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) + # dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) + dest = self.copy_tree(new_src_dir, dest, dirs_exist_ok=force) # Restoring permissions for preexisting files and subdirectories. 
for filepath, permissions in original_permissions: From 7800b6ac96426d6d875252c43999a005098f792a Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 27 Nov 2023 11:51:02 -0700 Subject: [PATCH 228/413] [Documentation] [zos_tso_command] Add REXX exec example (#1065) * Add REXX exec example * Add fragment * Update module documentation * Fix PR link * Reword example task name * Updated REXX example --- changelogs/fragments/1065-rexx-exec-tso_command.yml | 4 ++++ docs/source/modules/zos_tso_command.rst | 4 ++++ plugins/modules/zos_tso_command.py | 4 ++++ 3 files changed, 12 insertions(+) create mode 100644 changelogs/fragments/1065-rexx-exec-tso_command.yml diff --git a/changelogs/fragments/1065-rexx-exec-tso_command.yml b/changelogs/fragments/1065-rexx-exec-tso_command.yml new file mode 100644 index 000000000..5d20ccfd6 --- /dev/null +++ b/changelogs/fragments/1065-rexx-exec-tso_command.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_tso_command - add example for executing explicitly a REXX script from + a data set. + (https://github.com/ansible-collections/ibm_zos_core/pull/1065). diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index d11cc8a98..846cb93d8 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -71,6 +71,10 @@ Examples - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 + - name: Execute TSO command to run explicitly a REXX script from a data set. + zos_tso_command: + commands: + - EXEC HLQ.DATASET.REXX exec diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index e3c4c6f12..87b157318 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -111,6 +111,10 @@ - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 +- name: Execute TSO command to run a REXX script explicitly from a data set. 
+ zos_tso_command: + commands: + - EXEC HLQ.DATASET.REXX exec """ from ansible.module_utils.basic import AnsibleModule From 44754ab9a527c77524f56bf18e11880cde8b6c1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 27 Nov 2023 16:38:05 -0600 Subject: [PATCH 229/413] Mvs to non existent mvs copy destination attrs match up (#1066) * Fixed error * Fix identation * Add fragment * Modify fragment * Modify fragment * Modify fragment --- ...nt_mvs_copy_destination_attrs_match_up.yml | 5 +++++ plugins/module_utils/data_set.py | 8 +++++++- plugins/modules/zos_copy.py | 20 +------------------ 3 files changed, 13 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml diff --git a/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml b/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml new file mode 100644 index 000000000..05e1c9ce4 --- /dev/null +++ b/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - When copying an executable data set with aliases and destination did not exist, + destination data set was created with wrong attributes. Fix now creates destination data set + with the same attributes as the source. + (https://github.com/ansible-collections/ibm_zos_core/pull/1066). 
\ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index ab7a3c3c8..cae505804 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -278,7 +278,7 @@ def ensure_uncataloged(name): return False @staticmethod - def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): + def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): """Allocates a data set based on the attributes of a 'model' data set. Useful when a data set needs to be created identical to another. Supported model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), @@ -291,6 +291,7 @@ def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): must be used. See extract_dsname(ds_name) in data_set.py model {str} -- The name of the data set whose allocation parameters should be used to allocate the new data set 'ds_name' + executable {bool} -- Whether the new data set should support executables asa_text {bool} -- Whether the new data set should support ASA control characters (have record format FBA) vol {str} -- The volume where data set should be allocated @@ -327,6 +328,11 @@ def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): alloc_cmd = """{0} - RECFM(F,B,A)""".format(alloc_cmd) + if executable: + alloc_cmd = """{0} - + RECFM(U) - + DSNTYPE(LIBRARY)""".format(alloc_cmd) + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True) if rc != 0: raise MVSCmdExecError(rc, out, err) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index dbed382f2..a2e545d8b 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2403,25 +2403,7 @@ def allocate_destination_data_set( elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED and not dest_exists: # Taking the src as model if it's also a PDSE. 
if src_ds_type in data_set.DataSet.MVS_PARTITIONED: - if executable: - src_attributes = datasets.listing(src_name)[0] - size = int(src_attributes.total_space) - record_format = "U" - record_length = 0 - - dest_params = get_data_set_attributes( - dest, - size, - is_binary, - asa_text, - record_format=record_format, - record_length=record_length, - type="LIBRARY", - volume=volume - ) - data_set.DataSet.ensure_present(replace=force, **dest_params) - else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.listing(src_name)[0] # The size returned by listing is in bytes. From 5c520cbee7cf99646a006fef41709b8844c0beb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sat, 2 Dec 2023 16:55:39 -0600 Subject: [PATCH 230/413] Bugfix/374/module zos mvs raw errors with long multi line quoted string in content field (#1057) * Add function of write content * Push easy soultion for two cases * Fix identation and more issues * Fix identation and more issues * Solve error of null * Add validation comments and separete the code * Add fragment * Modify logics * Return overthink * Add explanation for the user and change logic * Add explanation for the user and change logic * Change documentation * Change fragment * Better error message, better documentation and fragment * Get better mesages * Change the logic * Change documentation * Change logic * Add scape to # * Check failing * Check failing * Add valid scapes * Update zos_mvs_raw fragment and module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- 
...ti_line_quoted_string_in_content_field.yml | 12 ++ docs/source/modules/zos_mvs_raw.rst | 43 ++++++- plugins/modules/zos_mvs_raw.py | 115 +++++++++++++++++- 3 files changed, 162 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml diff --git a/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml b/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml new file mode 100644 index 000000000..49a3a3516 --- /dev/null +++ b/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml @@ -0,0 +1,12 @@ +minor_changes: + - zos_mvs_raw - when using the dd_input content option for instream-data, if + the content was not properly indented according to the program which is + generally a blank in columns 1 & 2, those columns would be truncated. Now, + when setting instream-data, the module will ensure that all lines contain + a blank in columns 1 and 2 and add blanks when not present while retaining + a maximum length of 80 columns for any line. This is true for all content + types; string, list of strings and when using a YAML block indicator. + (https://github.com/ansible-collections/ibm_zos_core/pull/1057). + - zos_mvs_raw - no examples were included with the module that demonstrated + using a YAML block indicator, this now includes examples using a YAML + block indicator. diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 33247a3e9..fb6a1a726 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -655,10 +655,12 @@ dds Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - If a multi-line string is provided make sure to use the proper literal block style indicator "|". 
- If a list of strings is provided, newlines will be added to each of the lines when used as input. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + | **required**: True | **type**: raw @@ -1371,10 +1373,12 @@ dds Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - If a multi-line string is provided make sure to use the proper literal block style indicator "|". - If a list of strings is provided, newlines will be added to each of the lines when used as input. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. 
While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + | **required**: True | **type**: raw @@ -1708,6 +1712,35 @@ Examples return_content: type: text + - name: Define a cluster using a literal block style indicator + with a 2 space indentation. + zos_mvs_raw: + program_name: idcams + auth: yes + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: |2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) + @@ -1721,6 +1754,8 @@ Notes 2. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + 3. When executing a program, refer to the programs documentation as each programs requirments can vary fom DDs, instream-data indentation and continuation characters. + See Also diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 55937ea63..4eab2b023 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -556,10 +556,25 @@ - I(dd_input) supports single or multiple lines of input. - Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - - If a multi-line string is provided make sure to use the - proper literal block style indicator "|". 
- If a list of strings is provided, newlines will be added to each of the lines when used as input. + - 'If a multi-line string is provided, use the proper block scalar + style. YAML supports both + L(literal,https://yaml.org/spec/1.2.2/#literal-style) and + L(folded,https://yaml.org/spec/1.2.2/#line-folding) scalars. + It is recommended to use the literal style indicator + "|" with a block indentation indicator, for example; + I(content: | 2) is a literal block style indicator with a 2 space + indentation, the entire block will be indented and newlines + preserved. The block indentation range is 1 - 9. While generally + unnecessary, YAML does support block + L(chomping,https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator) + indicators "+" and "-" as well.' + - When using the I(content) option for instream-data, the module + will ensure that all lines contain a blank in columns 1 and 2 + and add blanks when not present while retaining a maximum length + of 80 columns for any line. This is true for all I(content) types; + string, list of strings and when using a YAML block indicator. required: true type: raw return_content: @@ -1155,10 +1170,25 @@ - I(dd_input) supports single or multiple lines of input. - Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - - If a multi-line string is provided make sure to use the - proper literal block style indicator "|". - If a list of strings is provided, newlines will be added to each of the lines when used as input. + - 'If a multi-line string is provided, use the proper block scalar + style. YAML supports both + L(literal,https://yaml.org/spec/1.2.2/#literal-style) and + L(folded,https://yaml.org/spec/1.2.2/#line-folding) scalars. 
+ It is recommended to use the literal style indicator + "|" with a block indentation indicator, for example; + I(content: | 2) is a literal block style indicator with a 2 space + indentation, the entire block will be indented and newlines + preserved. The block indentation range is 1 - 9. While generally + unnecessary, YAML does support block + L(chomping,https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator) + indicators "+" and "-" as well.' + - When using the I(content) option for instream-data, the module + will ensure that all lines contain a blank in columns 1 and 2 + and add blanks when not present while retaining a maximum length + of 80 columns for any line. This is true for all I(content) types; + string, list of strings and when using a YAML block indicator. required: true type: raw return_content: @@ -1208,6 +1238,8 @@ - 2. L(zos_mvs_raw,./zos_mvs_raw.html) module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + - 3. When executing a program, refer to the programs documentation as each programs requirments + can vary fom DDs, instream-data indentation and continuation characters. seealso: - module: zos_data_set """ @@ -1522,6 +1554,35 @@ dd_name: sysprint return_content: type: text + + - name: Define a cluster using a literal block style indicator + with a 2 space indentation. 
+ zos_mvs_raw: + program_name: idcams + auth: yes + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: |2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( @@ -2166,6 +2227,11 @@ def dd_content(contents, dependencies): """ if contents is None: return None + if contents is not None: + # Empty string can be passed for content but not modify to ensure proper entry + if len(contents) > 0: + contents = modify_contents(contents) + return contents if isinstance(contents, list): return "\n".join(contents) return contents @@ -3090,6 +3156,47 @@ def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=No return stdout +def modify_contents(contents): + """Return the content of dd_input to a valid form for a JCL program. + + Args: + contents (str or list): The string or list with the program. + + Returns: + contents: The content in a proper multi line str. + """ + if not isinstance(contents, list): + contents = list(contents.split("\n")) + contents = prepend_spaces(contents) + contents = "\n".join(contents) + return contents + + +def prepend_spaces(lines): + """Return the array with two spaces at the beggining. + + Args: + lines (list): The list with a line of a program. + + Returns: + new_lines: The list in a proper two spaces and the code. + """ + module = AnsibleModuleHelper(argument_spec={}) + for index, line in enumerate(lines): + if len(line) > 0: + if len(line) > 80: + module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. 
+ If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + else: + if len(line) > 1 and line[0] != " " and line[1] != " ": + if len(line) > 78: + module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + else: + lines[index] = " {0}".format(line) + return lines + + class ZOSRawError(Exception): def __init__(self, program="", error=""): self.msg = "An error occurred during execution of z/OS program {0}. {1}".format( From d0cb7e87c31d348679194635b0ff6aba6e292f95 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 4 Dec 2023 14:27:57 -0600 Subject: [PATCH 231/413] Remove tarfile.exctractall and zipfile.extractall in favor of individual member extraction for sanity (#1077) * Implemented extract_all function * Added changelog * Removed commented lines --- changelogs/fragments/1077-modify-uss-extraction.yml | 3 +++ plugins/modules/zos_unarchive.py | 8 ++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1077-modify-uss-extraction.yml diff --git a/changelogs/fragments/1077-modify-uss-extraction.yml b/changelogs/fragments/1077-modify-uss-extraction.yml new file mode 100644 index 000000000..0886dfab1 --- /dev/null +++ b/changelogs/fragments/1077-modify-uss-extraction.yml @@ -0,0 +1,3 @@ +trivial: + - zos_unarchive - Change the USS file extraction method from extractall to a custom function to extract filtered members. + (https://github.com/ansible-collections/ibm_zos_core/pull/1077). 
\ No newline at end of file diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 81737ed29..9ab1409ca 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -462,6 +462,10 @@ def result(self): 'missing': self.missing, } + def extract_all(self, members): + for member in members: + self.file.extract(member) + class TarUnarchive(Unarchive): def __init__(self, module): @@ -527,7 +531,7 @@ def extract_src(self): self.file.extract(path) self.targets.append(path) else: - self.file.extractall(members=sanitize_members(self.file.getmembers(), self.dest, self.format)) + self.extract_all(members=sanitize_members(self.file.getmembers(), self.dest, self.format)) self.targets = files_in_archive self.file.close() # Returning the current working directory to what it was before to not @@ -598,7 +602,7 @@ def extract_src(self): self.file.extract(path) self.targets.append(path) else: - self.file.extractall(members=sanitize_members(self.file.infolist(), self.dest, self.format)) + self.extract_all(members=sanitize_members(self.file.infolist(), self.dest, self.format)) self.targets = files_in_archive self.file.close() # Returning the current working directory to what it was before to not From e6bda1b32d1b5dbcda96355bbb64694873aaebcf Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 4 Dec 2023 13:12:13 -0800 Subject: [PATCH 232/413] Update ac tool version of pyyaml to avoid wheel issue Signed-off-by: ddimatos <dimatos@gmail.com> --- scripts/requirements-common.env | 4 ++-- scripts/venv.sh | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/requirements-common.env b/scripts/requirements-common.env index 365b8aa4f..5f76436bf 100644 --- a/scripts/requirements-common.env +++ b/scripts/requirements-common.env @@ -67,7 +67,7 @@ requirements=( "oyaml" "Parsley" "PyNaCl" -"PyYAML" +"PyYAML:6.0.1" "Pygments" "packaging" "paramiko" @@ -130,4 +130,4 @@ requirements=( # "oyaml" # "mock" 
# "pytest-ansible" -# ) \ No newline at end of file +# ) diff --git a/scripts/venv.sh b/scripts/venv.sh index 5ec946c49..315e7a854 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -280,6 +280,8 @@ create_venv_and_pip_install_req(){ find_in_path() { result="" + OTHER_PYTHON_PATHS="/Library/Frameworks/Python.framework/Versions/Current/bin:/opt/homebrew/bin:" + PATH="${OTHER_PYTHON_PATHS}${PATH}" IFS=: for x in $PATH; do if [ -x "$x/$1" ]; then @@ -295,7 +297,7 @@ find_in_path() { discover_python(){ # Don't use which, it only will find first in path within script # for python_found in `which python3 | cut -d" " -f3`; do - pys=("python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") + pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") #pys=("python3.8" "python3.9") for py in "${pys[@]}"; do for python_found in `find_in_path $py`; do From e1101237c4e8dd670c0a4765da684fa5f25709fb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 4 Dec 2023 13:25:22 -0800 Subject: [PATCH 233/413] Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1048-update-ac-tool-pyyaml-version.yml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml diff --git a/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml b/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml new file mode 100644 index 000000000..309862cfb --- /dev/null +++ b/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml @@ -0,0 +1,8 @@ +trivial: + - ac - PyYaml version 5.4.1 was being installed and not having a wheel to go + with the python versions 11 and 12. This fixes the issue by freezing the + the version to 6.0.1. + - ac - would not discover python installations not in PATH. This change + extends the search path to include common python installation locations + not in path. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1083). From 1232079e6ee5cbc4d3ece06f484dc90711a9dba1 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 4 Dec 2023 16:09:16 -0600 Subject: [PATCH 234/413] Fixed sanity issues with zos_mvs_raw (#1084) --- plugins/modules/zos_mvs_raw.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 4eab2b023..502d2ead7 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1239,7 +1239,7 @@ "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. - 3. When executing a program, refer to the programs documentation as each programs requirments - can vary fom DDs, instream-data indentation and continuation characters. + can vary fom DDs, instream-data indentation and continuation characters. seealso: - module: zos_data_set """ @@ -3185,13 +3185,15 @@ def prepend_spaces(lines): for index, line in enumerate(lines): if len(line) > 0: if len(line) > 80: - module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. - If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ + module.fail_json(msg=msg.format(line)) else: if len(line) > 1 and line[0] != " " and line[1] != " ": if len(line) > 78: - module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. - If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. 
""".format(line)) + msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ + module.fail_json(msg=msg.format(line)) else: lines[index] = " {0}".format(line) return lines From 5abdfd5077de5aacc7efc3f818e3a8eb96d9cbcf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 5 Dec 2023 23:07:04 -0600 Subject: [PATCH 235/413] [bugfix][v1.9.0][zos_unarchive]action plugin does not clean up remote temporary files after completion (#1073) * Push solution * Add fragment * Modify logic for fails and ensure works * Change bug * Chamge fragment * Chamge fragment * Add comma --------- Co-authored-by: Demetri <dimatos@gmail.com> --- ...up_remote_temporary_files_after_completion.yml | 4 ++++ plugins/action/zos_unarchive.py | 15 ++++++++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml diff --git a/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml b/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml new file mode 100644 index 000000000..6532e60ae --- /dev/null +++ b/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. + Change now removes temporary files. + (https://github.com/ansible-collections/ibm_zos_core/pull/1073). 
\ No newline at end of file diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index d808647ef..6e679d62d 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -46,6 +46,9 @@ def run(self, tmp=None, task_vars=None): module_args = self._task.args.copy() + tmp_files = "" + uss_format = None + if module_args.get("remote_src", False): result.update( self._execute_module( @@ -67,9 +70,10 @@ def run(self, tmp=None, task_vars=None): source = os.path.realpath(source) if format_name in USS_SUPPORTED_FORMATS: - dest = self._execute_module( + tmp_files = dest = self._execute_module( module_name="tempfile", module_args={}, task_vars=task_vars, ).get("path") + uss_format = format_name elif format_name in MVS_SUPPORTED_FORMATS: if dest_data_set is None: dest_data_set = dict() @@ -120,4 +124,13 @@ def run(self, tmp=None, task_vars=None): ) else: result.update(dict(failed=True)) + + if not module_args.get("remote_src", False) and uss_format: + self._remote_cleanup(tmp_files) + return result + + def _remote_cleanup(self, tempfile_path): + """Removes the temporary file in a managed node created for a local + script.""" + self._connection.exec_command("rm -f {0}".format(tempfile_path)) From 0b8a919472768b531fdf44cf1baa07c0ac00f4b8 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 5 Dec 2023 23:11:02 -0600 Subject: [PATCH 236/413] [v1.9.0] Bugfix/837/missing ZOAU imports (#1042) * Added new missing import handler to zos_operator * Added new import handler to job util and zos_job_query * Added ZOAU check to zos_operator * Fixed sanity test issue * Fixed Python 2.7 sanity issue * Add changelog fragment --------- Co-authored-by: Demetri <dimatos@gmail.com> --- .../fragments/1042-missing-zoau-imports.yml | 10 +++++ plugins/module_utils/import_handler.py | 42 +++++++++++++++++++ plugins/module_utils/job.py | 31 +++++++++----- plugins/modules/zos_job_query.py | 3 +- plugins/modules/zos_operator.py | 15 
+++++-- 5 files changed, 85 insertions(+), 16 deletions(-) create mode 100644 changelogs/fragments/1042-missing-zoau-imports.yml diff --git a/changelogs/fragments/1042-missing-zoau-imports.yml b/changelogs/fragments/1042-missing-zoau-imports.yml new file mode 100644 index 000000000..a91f6de48 --- /dev/null +++ b/changelogs/fragments/1042-missing-zoau-imports.yml @@ -0,0 +1,10 @@ +bugfixes: + - zos_job_query - The module handling ZOAU import errors obscured the + original traceback when an import error ocurred. Fix now passes correctly + the context to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + + - zos_operator - The module handling ZOAU import errors obscured the + original traceback when an import error ocurred. Fix now passes correctly + the context to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1042). \ No newline at end of file diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py index 3e774f53a..a7b41a619 100644 --- a/plugins/module_utils/import_handler.py +++ b/plugins/module_utils/import_handler.py @@ -27,6 +27,48 @@ def method(*args, **kwargs): return method +class ZOAUImportError(object): + """This class serves as a wrapper for any kind of error when importing + ZOAU. Since ZOAU is used by both modules and module_utils, we need a way + to alert the user when they're trying to use a function that couldn't be + imported properly. If we only had to deal with this in modules, we could + just validate that imports worked at the start of their main functions, + but on utils, we don't have an entry point where we can validate this. + Just raising an exception when trying the import would be better, but that + introduces a failure on Ansible sanity tests, so we can't do it. + + Instead, we'll replace what would've been a ZOAU library with this class, + and the moment ANY method gets called, we finally raise an exception. 
+ """ + + def __init__(self, exception_traceback): + """When creating a new instance of this class, we save the traceback + from the original exception so that users have more context when their + task/code fails. The expected traceback is a string representation of + it, not an actual traceback object. By importing `traceback` from the + standard library and calling `traceback.format_exc()` we can + get this string. + """ + self.traceback = exception_traceback + + def __getattr__(self, name): + """This code is virtually the same from `MissingZOAUImport`. What we + do here is hijack all calls to any method from a missing ZOAU library + and instead return a method that will alert the user that there was + an error while importing ZOAU. + """ + def method(*args, **kwargs): + raise ImportError( + ( + "ZOAU is not properly configured for Ansible. Unable to import zoautil_py. " + "Ensure environment variables are properly configured in Ansible for use with ZOAU. " + "Complete traceback: {0}".format(self.traceback) + ) + ) + + return method + + class MissingImport(object): def __init__(self, import_name=""): self.import_name = import_name diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index bf23bf5bc..94a65d8c3 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -15,21 +15,30 @@ import fnmatch import re +import traceback from time import sleep from timeit import default_timer as timer from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + # MissingZOAUImport, + ZOAUImportError ) try: - from zoautil_py.jobs import read_output, list_dds, listing + # For files that import individual functions from a ZOAU module, + # we'll replace the imports to instead get the module. 
+ # This way, we'll always make a call to the module, allowing us + # to properly get the exception we need and avoid the issue + # described in #837. + # from zoautil_py.jobs import read_output, list_dds, listing + from zoautil_py import jobs except Exception: - read_output = MissingZOAUImport() - list_dds = MissingZOAUImport() - listing = MissingZOAUImport() + # read_output = MissingZOAUImport() + # list_dds = MissingZOAUImport() + # listing = MissingZOAUImport() + jobs = ZOAUImportError(traceback.format_exc()) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( zoau_version_checker @@ -204,7 +213,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] # e.g.: OMVSADM HELLO JOB00126 JCLERR ? - # listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not + # jobs.listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] # creationdatetime=job[9] queueposition=job[10] @@ -217,13 +226,13 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T kwargs = { "job_id": job_id_temp, } - entries = listing(**kwargs) + entries = jobs.listing(**kwargs) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = listing(**kwargs) + entries = jobs.listing(**kwargs) if entries: for entry in entries: @@ -275,12 +284,12 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.id) while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = 
list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.id) job["duration"] = duration @@ -325,7 +334,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "stepname" in single_dd: if "dataset" in single_dd: - tmpcont = read_output( + tmpcont = jobs.read_output( entry.id, single_dd["stepname"], single_dd["dataset"]) dd["content"] = tmpcont.split("\n") diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 283467766..cf94fa684 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -260,6 +260,7 @@ ) from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text import re @@ -287,7 +288,7 @@ def run_module(): jobs = None except Exception as e: - module.fail_json(msg=e, **result) + module.fail_json(msg=to_text(e), **result) result["jobs"] = jobs module.exit_json(**result) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 969890ba5..273b0a867 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -144,14 +144,17 @@ sample: true """ +import traceback from timeit import default_timer as timer from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + # MissingZOAUImport, + ZOAUImportError ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( @@ -161,7 +164,7 @@ try: from zoautil_py import opercmd except Exception: - opercmd = MissingZOAUImport() + opercmd = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import ZOAU_API_VERSION @@ -190,6 +193,10 @@ def run_module(): result = dict(changed=False) module = AnsibleModule(argument_spec=module_args, 
supports_check_mode=False) + # Checking that we can actually use ZOAU. + if isinstance(opercmd, ZOAUImportError): + module.fail_json(msg="An error ocurred while importing ZOAU: {0}".format(opercmd.traceback)) + try: new_params = parse_params(module.params) rc_message = run_operator_command(new_params) @@ -241,10 +248,10 @@ def run_module(): stderr_lines=str(error).splitlines() if error is not None else result["content"], changed=result["changed"],) except Error as e: - module.fail_json(msg=repr(e), **result) + module.fail_json(msg=to_text(e), **result) except Exception as e: module.fail_json( - msg="An unexpected error occurred: {0}".format(repr(e)), **result + msg="An unexpected error occurred: {0}".format(to_text(e)), **result ) module.exit_json(**result) From 5b4a3bbba15199666cf12992e2cb1ad87f0c99e6 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 5 Dec 2023 23:19:22 -0600 Subject: [PATCH 237/413] [Enhancement][zos_job_submit] Modify error messages to be clearer (#1074) * Improved error messages * Updated failure logic --------- Co-authored-by: Demetri <dimatos@gmail.com> --- .../1074-improve-job-submit-error-msgs.yml | 3 ++ plugins/action/zos_job_submit.py | 34 ++++++------------- 2 files changed, 14 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/1074-improve-job-submit-error-msgs.yml diff --git a/changelogs/fragments/1074-improve-job-submit-error-msgs.yml b/changelogs/fragments/1074-improve-job-submit-error-msgs.yml new file mode 100644 index 000000000..769131a2b --- /dev/null +++ b/changelogs/fragments/1074-improve-job-submit-error-msgs.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_job_submit - Improve error messages in zos_job_submit to be clearer. + (https://github.com/ansible-collections/ibm_zos_core/pull/1074). 
diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 630ce7969..c28fcec76 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -57,15 +57,18 @@ def run(self, tmp=None, task_vars=None): source = self._task.args.get("src", None) # Get a temporary file on the managed node - dest_path = self._execute_module( - module_name="tempfile", module_args={}, task_vars=task_vars, - ).get("path") + tempfile = self._execute_module( + module_name="tempfile", module_args=dict(state="file"), task_vars=task_vars, + ) + dest_path = tempfile.get("path") result["failed"] = True - if source is None or dest_path is None: - result["msg"] = "src and dest are required" - elif source is not None and source.endswith("/"): - result["msg"] = "src must be a file" + if source is None: + result["msg"] = "Source is required." + elif dest_path is None: + result["msg"] = "Failed copying to remote, destination file was not created. {0}".format(tempfile.get("msg")) + elif source is not None and os.path.isdir(to_bytes(source, errors="surrogate_or_strict")): + result["msg"] = "Source must be a file." else: del result["failed"] @@ -79,11 +82,6 @@ def run(self, tmp=None, task_vars=None): result["msg"] = to_text(e) return result - if os.path.isdir(to_bytes(source, errors="surrogate_or_strict")): - result["failed"] = True - result["msg"] = to_text("NOT SUPPORTING THE DIRECTORY.") - return result - if tmp is None or "-tmp-" not in tmp: tmp = self._make_tmp_path() @@ -93,7 +91,7 @@ def run(self, tmp=None, task_vars=None): # source_rel = os.path.basename(source) except AnsibleFileNotFound as e: result["failed"] = True - result["msg"] = "could not find src=%s, %s" % (source_full, e) + result["msg"] = "Source {0} not found. 
{1}".format(source_full, e) self._remove_tmp_path(tmp) return result @@ -102,16 +100,6 @@ def run(self, tmp=None, task_vars=None): # else: dest_file = self._connection._shell.join_path(dest_path) - dest_status = self._execute_remote_stat( - dest_file, all_vars=task_vars, follow=False - ) - - if dest_status["exists"] and dest_status["isdir"]: - self._remove_tmp_path(tmp) - result["failed"] = True - result["msg"] = "can not use content with a dir as dest" - return result - tmp_src = self._connection._shell.join_path(tmp, "source") rendered_file = None From 87f1523c8fbb3ba707ce2aca71a0cd0b5093986d Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Thu, 7 Dec 2023 08:37:22 -0800 Subject: [PATCH 238/413] update link in managed_node doc (#1089) * update link in managed_node doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Updated docs regarding managed node Signed-off-by: ddimatos <dimatos@gmail.com> * Doc fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../1089-update-managed_node_doc.yml | 3 + docs/source/requirements-single.rst | 150 +++++++----------- docs/source/requirements_managed.rst | 147 +++++++---------- 3 files changed, 110 insertions(+), 190 deletions(-) create mode 100644 changelogs/fragments/1089-update-managed_node_doc.yml diff --git a/changelogs/fragments/1089-update-managed_node_doc.yml b/changelogs/fragments/1089-update-managed_node_doc.yml new file mode 100644 index 000000000..e0c7ff18b --- /dev/null +++ b/changelogs/fragments/1089-update-managed_node_doc.yml @@ -0,0 +1,3 @@ +trivial: + - managed node doc - updated the managed node documentation links and content. + (https://github.com/ansible-collections/ibm_zos_core/pull/1089). 
diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst index ca745f178..3f0b2b8e0 100644 --- a/docs/source/requirements-single.rst +++ b/docs/source/requirements-single.rst @@ -20,7 +20,7 @@ The controller is where the Ansible engine that runs the playbook is installed. Refer to RedHat Ansible Certified Content documentation for more on the `controllers dependencies`_. .. _controllers dependencies: - https://ibm.github.io/z_ansible_collections_doc/requirements/requirements_controller.html + https://ibm.github.io/z_ansible_collections_doc/requirements/requirements.html#control-node .. ........................................................................... .. © Copyright IBM Corporation 2020 . .. ........................................................................... @@ -29,130 +29,86 @@ Managed node ============ The managed z/OS node is the host that is managed by Ansible, as identified in -the Ansible inventory. -The managed node has dependencies that are specific to each release of the -**IBM z/OS core collection**. Review the details of the dependencies before you -proceed to install the IBM z/OS core collection. +the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, +some dependencies are required to be installed on z/OS such as: -* z/OS `V2R3`_ or `later`_ +* `z/OS`_ * `z/OS OpenSSH`_ -* Supported by `IBM Open Enterprise SDK for Python`_ - (previously `IBM Open Enterprise Python for z/OS`_) 3.8.2 or later -* `IBM Z Open Automation Utilities`_ (ZOAU) +* `z/OS® shell`_ +* `IBM Open Enterprise SDK for Python`_ +* `IBM Z Open Automation Utilities`_ .. note:: - IBM z/OS core collection is dependent on specific versions of - Z Open Automation Utilities (ZOAU). For information about the required - version of ZOAU, review the `release notes`_. For detailed instructions on - installation and configuration of ZOAU, - `Installing and Configuring ZOA Utilities`_. 
+ Each release of the IBM z/OS core collection depends on specific dependency + versions. For information on the dependencies or the versions, review the + `release notes`_ reference section. -* The `z/OS® shell`_ +z/OS shell +---------- - .. note:: - Currently, only ``z/OS® shell`` is supported. Using - ``ansible_shell_executable`` to change the default shell is discouraged. - For more information, see `Ansible documentation`_. Shells such as ``bash`` - are not supported because they handle the reading and writing of untagged - files differently. Please review the README.ZOS guide included with the - ported ``bash`` shell for further configurations. - -.. _Installing and Configuring ZOA Utilities: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE_1.1.0/install.html - -.. _Ansible documentation: - https://docs.ansible.com/ansible/2.7/user_guide/intro_inventory.html - -.. _Python on z/OS: - requirements-single.html#id1 - -.. _V2R3: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html - -.. _later: - https://www.ibm.com/support/knowledgecenter/SSLTBW - -.. _IBM Z Open Automation Utilities: - requirements-single.html#id1 - -.. _z/OS OpenSSH: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.2.0/com.ibm.zos.v2r2.e0za100/ch1openssh.htm +Currently, only the `z/OS® shell`_ is supported. Using ``ansible_shell_executable`` +to change the default shell is discouraged. Shells such as ``bash`` are not supported +because it handles the reading and writing of untagged files differently. -.. _release notes: - release_notes.html - -.. _playbook configuration: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md - -.. _FAQs: - https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html - -.. _z/OS® shell: - https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm - -.. 
_Z Open Automation Utilities 1.1.0: - https://www.ibm.com/support/knowledgecenter/SSKFYE_1.1.0/install.html - -.. _configured IBM Open Enterprise Python on z/OS: - https://www.ibm.com/support/knowledgecenter/SSCH7P_3.8.0/install.html +Open Enterprise SDK for Python +------------------------------ -Python on z/OS --------------- - -If the Ansible target is z/OS, you must install -**IBM Open Enterprise Python for z/OS** which is ported for the z/OS platform -and required by **IBM z/OS core collection**. +The **IBM z/OS core collection** requires that the **IBM Open Enterprise SDK for Python** +be installed on z/OS. **Installation** -* Visit the `IBM Open Enterprise Python for z/OS`_ product page for FMID, +* Visit the `IBM Open Enterprise SDK for Python`_ product page for the FMID, program directory, fix list, latest PTF, installation and configuration instructions. * For reference, the Program IDs are: * 5655-PYT for the base product * 5655-PYS for service and support -* Optionally, download **IBM Open Enterprise Python for z/OS**, `here`_ -* For the supported Python version, refer to the `release notes`_. +* Optionally, `download the IBM Open Enterprise SDK for Python`_ no cost + addition for installation. -.. _IBM Open Enterprise Python for z/OS: - http://www.ibm.com/products/open-enterprise-python-zos +IBM Z Open Automation Utilities +------------------------------- -.. _IBM Open Enterprise SDK for Python: - https://www.ibm.com/products/open-enterprise-python-zos +IBM Z Open Automation Utilities provide support for executing automation tasks +on z/OS. It can run z/OS programs such as IEBCOPY, IDCAMS and IKJEFT01, perform +data set operations and much more in the scripting language of your choice. -.. _here: - https://www-01.ibm.com/marketing/iwm/platform/mrs/assets?source=swg-ibmoep +**Installation** -.. 
note:: +* Visit the `IBM Z Open Automation Utilities`_ product page for the FMID, + program directory, fix list, latest PTF, installation, and configuration + instructions. +* For reference, the Program IDs are: - Currently, IBM Open Enterprise Python for z/OS is the supported and - recommended Python distribution for use with Ansible and ZOAU. If - Rocket Python is the only available Python on the target, review the - `recommended environment variables`_ for Rocket Python. + * 5698-PA1 for the base product + * 5698-PAS for service and support +* Optionally, `download the IBM Z Open Automation Utilities`_ no cost + addition for installation. -.. _recommended environment variables: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md#variables -ZOAU ----- +.. _z/OS: + https://www.ibm.com/docs/en/zos -IBM Z Open Automation Utilities provide support for executing automation tasks -on z/OS. With ZOAU, you can run traditional MVS commands such as IEBCOPY, -IDCAMS, and IKJEFT01, as well as perform a number of data set operations -in the scripting language of your choice. +.. _z/OS OpenSSH: + https://www.ibm.com/docs/en/zos/latest?topic=zbed-zos-openssh -**Installation** +.. _z/OS® shell: + https://www.ibm.com/docs/en/zos/latest?topic=guide-zos-shells -* Visit the `ZOAU`_ product page for the FMID, program directory, fix list, - latest PTF, installation, and configuration instructions. -* For reference, the Program IDs are: +.. _IBM Open Enterprise SDK for Python: + https://www.ibm.com/products/open-enterprise-python-zos - * 5698-PA1 for the base product - * 5698-PAS for service and support -* For ZOAU supported version, refer to the `release notes`_. +.. _IBM Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau + +.. _release notes: + release_notes.html -.. _ZOAU: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE +.. 
_download the IBM Open Enterprise SDK for Python: + https://www.ibm.com/account/reg/us-en/signup?formid=urx-49465 +.. _download the IBM Z Open Automation Utilities: + https://ibm.github.io/mainframe-downloads/downloads.html#devops \ No newline at end of file diff --git a/docs/source/requirements_managed.rst b/docs/source/requirements_managed.rst index 8be719819..24cb80f45 100644 --- a/docs/source/requirements_managed.rst +++ b/docs/source/requirements_managed.rst @@ -6,126 +6,87 @@ Managed node ============ The managed z/OS node is the host that is managed by Ansible, as identified in -the Ansible inventory. -The managed node has dependencies that are specific to each release of the -**IBM z/OS core collection**. Review the details of the dependencies before you -proceed to install the IBM z/OS core collection. +the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, +some dependencies are required to be installed on z/OS such as: -* z/OS `V2R3`_ or `later`_ +* `z/OS`_ * `z/OS OpenSSH`_ -* Supported by `IBM Open Enterprise SDK for Python`_ - (previously `IBM Open Enterprise Python for z/OS`_) 3.8.2 or later -* `IBM Z Open Automation Utilities`_ (ZOAU) +* `z/OS® shell`_ +* `IBM Open Enterprise SDK for Python`_ +* `IBM Z Open Automation Utilities`_ - .. note:: - IBM z/OS core collection is dependent on specific versions of - Z Open Automation Utilities (ZOAU). For information about the required - version of ZOAU, review the `release notes`_. For detailed instructions on - installation and configuration of ZOAU, - `Installing and Configuring ZOA Utilities`_. - -* The `z/OS® shell`_ - - .. note:: - Currently, only ``z/OS® shell`` is supported. Using - ``ansible_shell_executable`` to change the default shell is discouraged. - For more information, see `Ansible documentation`_. Shells such as ``bash`` - are not supported because they handle the reading and writing of untagged - files differently. 
Please review the README.ZOS guide included with the - ported ``bash`` shell for further configurations. - -.. _Ansible documentation: - https://docs.ansible.com/ansible/2.7/user_guide/intro_inventory.html - -.. _Python on z/OS: - requirements_managed.html#id1 - -.. _Installing and Configuring ZOA Utilities: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE_1.1.0/install.html - -.. _V2R3: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html - -.. _later: - https://www.ibm.com/support/knowledgecenter/SSLTBW - -.. _IBM Z Open Automation Utilities: - requirements_managed.html#zoau - -.. _z/OS OpenSSH: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.2.0/com.ibm.zos.v2r2.e0za100/ch1openssh.htm - -.. _release notes: - release_notes.html - -.. _playbook configuration: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md +.. note:: -.. _z/OS® shell: - https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm + Each release of the IBM z/OS core collection depends on specific dependency + versions. For information on the dependencies or the versions, review the + `release notes`_ reference section. -.. _Z Open Automation Utilities 1.1.0: - https://www.ibm.com/support/knowledgecenter/SSKFYE_1.1.0/install.html +z/OS shell +---------- -.. _configured IBM Open Enterprise Python on z/OS: - https://www.ibm.com/support/knowledgecenter/SSCH7P_3.8.0/install.html +Currently, only the `z/OS® shell`_ is supported. Using ``ansible_shell_executable`` +to change the default shell is discouraged. Shells such as ``bash`` are not supported +because it handles the reading and writing of untagged files differently. 
-Python on z/OS --------------- +Open Enterprise SDK for Python +------------------------------ -If the Ansible target is z/OS, you must install -**IBM Open Enterprise Python for z/OS** which is ported for the z/OS platform -and required by **IBM z/OS core collection**. +The **IBM z/OS core collection** requires that the **IBM Open Enterprise SDK for Python** +be installed on z/OS. **Installation** -* Visit the `IBM Open Enterprise Python for z/OS`_ product page for FMID, +* Visit the `IBM Open Enterprise SDK for Python`_ product page for the FMID, program directory, fix list, latest PTF, installation and configuration instructions. * For reference, the Program IDs are: * 5655-PYT for the base product * 5655-PYS for service and support -* Optionally, download **IBM Open Enterprise Python for z/OS**, `here`_ -* For the supported Python version, refer to the `release notes`_. - -.. _IBM Open Enterprise Python for z/OS: - http://www.ibm.com/products/open-enterprise-python-zos -.. _IBM Open Enterprise SDK for Python: - https://www.ibm.com/products/open-enterprise-python-zos +* Optionally, `download the IBM Open Enterprise SDK for Python`_ no cost + addition for installation. -.. _here: - https://www-01.ibm.com/marketing/iwm/platform/mrs/assets?source=swg-ibmoep - -.. note:: - - Currently, IBM Open Enterprise Python for z/OS is the supported and - recommended Python distribution for use with Ansible and ZOAU. If - Rocket Python is the only available Python on the target, review the - `recommended environment variables`_ for Rocket Python. - -.. _recommended environment variables: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md#variables - -ZOAU ----- +IBM Z Open Automation Utilities +------------------------------- IBM Z Open Automation Utilities provide support for executing automation tasks -on z/OS. 
With ZOAU, you can run traditional MVS commands such as IEBCOPY, -IDCAMS, and IKJEFT01, as well as perform a number of data set operations -in the scripting language of your choice. +on z/OS. It can run z/OS programs such as IEBCOPY, IDCAMS and IKJEFT01, perform +data set operations and much more in the scripting language of your choice. **Installation** -* Visit the `ZOAU`_ product page for the FMID, program directory, fix list, - latest PTF, installation, and configuration instructions. +* Visit the `IBM Z Open Automation Utilities`_ product page for the FMID, + program directory, fix list, latest PTF, installation, and configuration + instructions. * For reference, the Program IDs are: * 5698-PA1 for the base product * 5698-PAS for service and support -* For ZOAU supported version, refer to the `release notes`_. +* Optionally, `download the IBM Z Open Automation Utilities`_ no cost + addition for installation. + + +.. _z/OS: + https://www.ibm.com/docs/en/zos + +.. _z/OS OpenSSH: + https://www.ibm.com/docs/en/zos/latest?topic=zbed-zos-openssh + +.. _z/OS® shell: + https://www.ibm.com/docs/en/zos/latest?topic=guide-zos-shells + +.. _IBM Open Enterprise SDK for Python: + https://www.ibm.com/products/open-enterprise-python-zos + +.. _IBM Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau + +.. _release notes: + release_notes.html -.. _ZOAU: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE +.. _download the IBM Open Enterprise SDK for Python: + https://www.ibm.com/account/reg/us-en/signup?formid=urx-49465 +.. 
_download the IBM Z Open Automation Utilities: + https://ibm.github.io/mainframe-downloads/downloads.html#devops \ No newline at end of file From 6868de2134d88f8012d2c751bedd541c6217a014 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Thu, 7 Dec 2023 15:56:15 -0600 Subject: [PATCH 239/413] First check of solution --- plugins/action/zos_job_submit.py | 1 - plugins/modules/zos_job_submit.py | 21 +++++++++------------ tests/sanity/ignore-2.15.txt | 3 --- 3 files changed, 9 insertions(+), 16 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index c28fcec76..e7d4128ed 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -135,7 +135,6 @@ def run(self, tmp=None, task_vars=None): result = {} copy_module_args = {} module_args = self._task.args.copy() - module_args["temp_file"] = dest_path copy_module_args.update( dict( diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 11f0f3ccb..397598e42 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -611,7 +611,7 @@ job_output, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( data_set, @@ -624,19 +624,19 @@ from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove +import traceback from time import sleep import re try: - from zoautil_py.exceptions import ZOAUException, JobSubmitException + from zoautil_py import exceptions except ImportError: - ZOAUException = MissingZOAUImport() - JobSubmitException = MissingZOAUImport() + exceptions = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import jobs except Exception: - jobs = MissingZOAUImport() + jobs = 
ZOAUImportError(traceback.format_exc()) if PY3: from shlex import quote @@ -731,7 +731,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, # ZOAU throws a ZOAUException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed # in the job_output parser. - except ZOAUException as err: + except exceptions.ZOAUException as err: result["changed"] = False result["failed"] = True result["stderr"] = str(err) @@ -746,7 +746,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id # has been returned within the allocated time. - except JobSubmitException as err: + except exceptions.JobSubmitException as err: result["changed"] = False result["failed"] = False result["stderr"] = str(err) @@ -815,7 +815,6 @@ def run_module(): return_output=dict(type="bool", required=False, default=True), wait_time_s=dict(type="int", default=10), max_rc=dict(type="int", required=False), - temp_file=dict(type="path", required=False), use_template=dict(type='bool', default=False), template_parameters=dict( type='dict', @@ -877,7 +876,6 @@ def run_module(): return_output=dict(arg_type="bool", default=True), wait_time_s=dict(arg_type="int", required=False, default=10), max_rc=dict(arg_type="int", required=False), - temp_file=dict(arg_type="path", required=False), ) # ******************************************************************** @@ -902,7 +900,7 @@ def run_module(): from_encoding = parsed_args.get("from_encoding") to_encoding = parsed_args.get("to_encoding") # temporary file names for copied files when user sets location to LOCAL - temp_file = parsed_args.get("temp_file") + temp_file = parsed_args.get("src") temp_file_encoded = None # Default 'changed' is False in case the module is not able to execute @@ -920,7 +918,6 @@ def run_module(): job_submitted_id = None duration = 0 start_time = 
timer() - if location == "DATA_SET": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) @@ -1039,7 +1036,7 @@ def run_module(): module.exit_json(**result) finally: - if temp_file: + if location != "DATA_SET" and location != "USS": remove(temp_file) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 415196660..8099f00e0 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -17,9 +17,6 @@ plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed un plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From f53998050612663991457aee49a35d5b9a2eed4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Mon, 11 Dec 2023 12:31:57 -0600 Subject: [PATCH 240/413] Add clear solution --- plugins/action/zos_job_submit.py | 1 + plugins/modules/zos_job_submit.py | 36 ++++--------------------------- 2 
files changed, 5 insertions(+), 32 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index e7d4128ed..c98f1d451 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -142,6 +142,7 @@ def run(self, tmp=None, task_vars=None): dest=dest_path, mode="0600", force=True, + encoding=module_args.get('encoding'), remote_src=True, ) ) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 397598e42..ff975dbc1 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -620,7 +620,6 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove @@ -638,12 +637,6 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) -if PY3: - from shlex import quote -else: - from pipes import quote - - JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) MAX_WAIT_TIME_S = 86400 @@ -922,32 +915,11 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) elif location == "USS": - job_submitted_id, duration = submit_src_jcl(module, src, src_name=src, timeout=wait_time_s, hfs=True) + job_submitted_id, duration = submit_src_jcl( + module, src, src_name=src, timeout=wait_time_s, hfs=True) else: - # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC - conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( - from_encoding, - to_encoding, - quote(temp_file), - quote(temp_file_encoded.name), - ) - - conv_rc, stdout, stderr = module.run_command( - conv_str, - use_unsafe_shell=True, - ) - - if conv_rc == 0: - job_submitted_id, duration = submit_src_jcl( - 
module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) - else: - result["failed"] = True - result["stdout"] = stdout - result["stderr"] = stderr - result["msg"] = ("Failed to convert the src {0} from encoding {1} to " - "encoding {2}, unable to submit job." - .format(src, from_encoding, to_encoding)) - module.fail_json(**result) + job_submitted_id, duration = submit_src_jcl( + module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) try: # Explictly pass None for the unused args else a default of '*' will be From 567f3846bcd1934410fe46d7152b4eb616e90154 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 11:47:14 -0600 Subject: [PATCH 241/413] Return local --- plugins/action/zos_job_submit.py | 1 - plugins/modules/zos_job_submit.py | 32 +++++++++++++++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index c98f1d451..e7d4128ed 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -142,7 +142,6 @@ def run(self, tmp=None, task_vars=None): dest=dest_path, mode="0600", force=True, - encoding=module_args.get('encoding'), remote_src=True, ) ) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index ff975dbc1..4177d9e94 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -620,6 +620,7 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six import PY3 from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove @@ -637,6 +638,11 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) +if PY3: + from shlex import quote +else: + from pipes import quote + JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) 
JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) MAX_WAIT_TIME_S = 86400 @@ -918,8 +924,30 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=True) else: - job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) + # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC + conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( + from_encoding, + to_encoding, + quote(temp_file), + quote(temp_file_encoded.name), + ) + + conv_rc, stdout, stderr = module.run_command( + conv_str, + use_unsafe_shell=True, + ) + + if conv_rc == 0: + job_submitted_id, duration = submit_src_jcl( + module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) + else: + result["failed"] = True + result["stdout"] = stdout + result["stderr"] = stderr + result["msg"] = ("Failed to convert the src {0} from encoding {1} to " + "encoding {2}, unable to submit job." 
+ .format(src, from_encoding, to_encoding)) + module.fail_json(**result) try: # Explictly pass None for the unused args else a default of '*' will be From f726333503f4510cfeae408bd784eb59ec52af6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 11:49:18 -0600 Subject: [PATCH 242/413] Add test ignores --- tests/sanity/ignore-2.14.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 415196660..8099f00e0 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -17,9 +17,6 @@ plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed un plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 3d5d7e86de002532c4d71a50b79a976e3dcd0ebb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 14:46:28 -0600 Subject: [PATCH 243/413] Add ignore to 2.16 --- tests/sanity/ignore-2.16.txt | 3 --- 1 file changed, 3 
deletions(-) diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 415196660..8099f00e0 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -17,9 +17,6 @@ plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed un plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 8af3270af80d0db6799eb7e4e954fc54450ee9cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 17:37:57 -0600 Subject: [PATCH 244/413] Optimize the encoding --- ...cumented_argument_and_import_exception.yml | 10 +++++ plugins/action/zos_job_submit.py | 1 + plugins/modules/zos_job_submit.py | 43 +++---------------- 3 files changed, 16 insertions(+), 38 deletions(-) create mode 100644 changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml diff --git a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml new file mode 100644 index 
000000000..5d1cf4d60 --- /dev/null +++ b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml @@ -0,0 +1,10 @@ +trivial: + - zos_job_submit - The module handling ZOAU import errors obscured the + original traceback when an import error ocurred. Fix now passes correctly + the context to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + + - zos_job_submit - The module had undocumented parameter and uses as temporary file + when the location of the file is LOCAL. Change now uses the same name as the src + for the temporary file removing the addition of tmp_file to the arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1091). \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index e7d4128ed..c98f1d451 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -142,6 +142,7 @@ def run(self, tmp=None, task_vars=None): dest=dest_path, mode="0600", force=True, + encoding=module_args.get('encoding'), remote_src=True, ) ) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 4177d9e94..36dc1357d 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -620,7 +620,6 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove @@ -638,10 +637,6 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) -if PY3: - from shlex import quote -else: - from pipes import quote JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) @@ -896,11 +891,8 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = 
parsed_args.get("max_rc") - from_encoding = parsed_args.get("from_encoding") - to_encoding = parsed_args.get("to_encoding") - # temporary file names for copied files when user sets location to LOCAL - temp_file = parsed_args.get("src") - temp_file_encoded = None + if location == "LOCAL": + temp_file = parsed_args.get("src") # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) @@ -911,9 +903,6 @@ def run_module(): "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) - if temp_file: - temp_file_encoded = NamedTemporaryFile(delete=True) - job_submitted_id = None duration = 0 start_time = timer() @@ -924,30 +913,8 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=True) else: - # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC - conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( - from_encoding, - to_encoding, - quote(temp_file), - quote(temp_file_encoded.name), - ) - - conv_rc, stdout, stderr = module.run_command( - conv_str, - use_unsafe_shell=True, - ) - - if conv_rc == 0: - job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) - else: - result["failed"] = True - result["stdout"] = stdout - result["stderr"] = stderr - result["msg"] = ("Failed to convert the src {0} from encoding {1} to " - "encoding {2}, unable to submit job." 
- .format(src, from_encoding, to_encoding)) - module.fail_json(**result) + job_submitted_id, duration = submit_src_jcl( + module, temp_file, src_name=src, timeout=wait_time_s, hfs=True) try: # Explictly pass None for the unused args else a default of '*' will be @@ -1036,7 +1003,7 @@ def run_module(): module.exit_json(**result) finally: - if location != "DATA_SET" and location != "USS": + if location == "LOCAL": remove(temp_file) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' From e03622bfd87d64de17ec3db918e8984524b49ccb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 17:55:24 -0600 Subject: [PATCH 245/413] Remove unused import --- plugins/modules/zos_job_submit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 36dc1357d..4b15cb424 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -621,7 +621,6 @@ ) from ansible.module_utils.basic import AnsibleModule from timeit import default_timer as timer -from tempfile import NamedTemporaryFile from os import remove import traceback from time import sleep From 92e2f899476e78b919bbb2467b158455ff3918ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 13 Dec 2023 11:05:52 -0600 Subject: [PATCH 246/413] Return encoded --- plugins/modules/zos_job_submit.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 4b15cb424..d4a5db1ea 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -890,6 +890,8 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") + from_encoding = 
parsed_args.get("from_encoding") + to_encoding = parsed_args.get("to_encoding") if location == "LOCAL": temp_file = parsed_args.get("src") From 51e51f1710861c273e6ac4b42dd7eca1b77c4cbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 13 Dec 2023 11:20:54 -0600 Subject: [PATCH 247/413] Add encoding --- tests/functional/modules/test_zos_job_submit_func.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 44dfdbf01..9e2d6e400 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -343,6 +343,7 @@ def test_job_submit_LOCAL(ansible_zos_module): results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) for result in results.contacted.values(): + print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True From bad1fc9122843a454196396d93c773a7ea4820f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 13 Dec 2023 11:21:46 -0600 Subject: [PATCH 248/413] Remove unused encoded --- plugins/modules/zos_job_submit.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index d4a5db1ea..4b15cb424 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -890,8 +890,6 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - from_encoding = parsed_args.get("from_encoding") - to_encoding = parsed_args.get("to_encoding") if location == "LOCAL": temp_file = parsed_args.get("src") From 
6ebbbf5c922347021781375d82abb4a10a8470d4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 11:05:17 -0600 Subject: [PATCH 249/413] Modified logic to remove tmp files if they exist and not only if location is local --- plugins/modules/zos_job_submit.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 4b15cb424..a0af50054 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -911,9 +911,9 @@ def run_module(): elif location == "USS": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=True) - else: + elif location == "LOCAL": job_submitted_id, duration = submit_src_jcl( - module, temp_file, src_name=src, timeout=wait_time_s, hfs=True) + module, src, src_name=src, timeout=wait_time_s, hfs=True) try: # Explictly pass None for the unused args else a default of '*' will be @@ -1002,7 +1002,7 @@ def run_module(): module.exit_json(**result) finally: - if location == "LOCAL": + if temp_file is not None: remove(temp_file) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' From bd2505b96bd444371e1840c3d0a446059789a6a1 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 11:15:54 -0600 Subject: [PATCH 250/413] Added temp_file null definition --- plugins/modules/zos_job_submit.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index a0af50054..4e2a1d52e 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -890,8 +890,7 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - if location == "LOCAL": - temp_file = parsed_args.get("src") + temp_file = 
parsed_args.get("src") if location == "LOCAL" else None # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) From fdb9b76becd81f42206398ac847ba0f6d71e07a4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 12:20:15 -0600 Subject: [PATCH 251/413] Update 1091-Update_undocumented_argument_and_import_exception.yml --- ...1091-Update_undocumented_argument_and_import_exception.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml index 5d1cf4d60..d1d1560f8 100644 --- a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml +++ b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml @@ -1,4 +1,4 @@ -trivial: +minor_changes: - zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. @@ -7,4 +7,4 @@ trivial: - zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1091). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/1091). 
From cd3638f30de5f55b08132f8355b2f18bba28eaa4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 12:58:25 -0600 Subject: [PATCH 252/413] Merge release v1.8.0 into dev branch (#1095) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: 
ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to 
remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> 
* Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. 
--------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant 
pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use 
typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run-on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols over special chars. 
* Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. 
This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV 
else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: 
ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to 
allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan… * Staging v1.7.0 beta.2 (#939) * Enhancement/866 archive (#930) * Added action plugin 
zos_unarchive * Added zos_archive changes * Added zos_unarchive changes * Added zos_archive tests changes * Added test zos_unarchive changes * Added zos_archive changes * fixed pep8 issues * Changed source to src in docs * Added correct copyright year * Updated docs * Added changelog fragments * Updated docs * Updated galaxy.yml * Updated meta * Updated docs * Added zos_gather_facts rst * Added changelog * Added release notes * Changed variable name to avoid shadowing import * Delete 930-archive-post-beta.yml * Delete v1.7.0-beta.2_summary.yml * Staging v1.7.0 merge to main (#1019) * Galaxy 1.7 updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update meta runtime to support ansible-core 2.14 or later Signed-off-by: ddimatos <dimatos@gmail.com> * Update ibm_zos_core_meta.yml with updated version Signed-off-by: ddimatos <dimatos@gmail.com> * Update readme to align to supported ansible versions and new urls Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional sanity ignore files to the exclude list Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional sanity ignore files to the exclude list for ansible-lint. 
Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright yrs for source files that were overlooked Signed-off-by: ddimatos <dimatos@gmail.com> * Remove requirements from module doc, rely on offerings minimum requirements, also zoau 1.2.1 never was supported Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog summary for 1.7 Signed-off-by: ddimatos <dimatos@gmail.com> * Adding generated antsibull-changelog release changelog and artifacts Signed-off-by: ddimatos <dimatos@gmail.com> * Remove v1.7.0_summary, its no longer needed Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes for ac 1.7.0 Signed-off-by: ddimatos <dimatos@gmail.com> * Remove unsupported collection versions requiring a version of zoau that is EOS Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Staging v1.8.0 beta.1 (#1037) * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos 
copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue 
template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. 
* expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use 
typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. 
This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added appropriate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue selection field (#593) * Update git issue selection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ignored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV 
else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: 
ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to 
allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returning and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additional logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile, can quotes in content be supported (#680) * Verify command of ZOAU support the double quotes and get better output of message * Verify command of ZOAU support the double quotes and get better output of message * Restore to the one time function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working according to the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the community module behavior. (#723) * Verify command of ZOAU support the double quotes and get better output of message * Verify command of ZOAU support the double quotes and get better output of message * Restore to the one time function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working according to the text * Solve the dest functional mode set for the applied to destination directory * Indentation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separate the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One indentation error * One indentation error * One indentation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove unnecessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returning and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragment for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additional logic for jobs that use typerun=scan * Update changelog fragment * Correct run-on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols over special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: 
Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bugfix/619/mode set for files applied test case (#757) * Add test case for copy d… * [v1.8.0][Backport] Clean temporary data sets created during XMIT unarchive operation (#1054) * Clean temporary data sets created during XMIT unarchive operation (#1049) * Added a temp cleanup * Added changelog * Modified changelog * Added removal of src if remote_src is False Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Modified changelog fragments --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Update 1049-xmit-temporary-data-sets.yml modified PR number --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Cherry picked v1.8.0 (#1063) * Bug 1041 zos submit job honor return output literally (#1058) 
* initial commit to pass return_output to job_output. * corrected fragment name to match branch * tweaked data set test to show result values if positive test fails * removed trace in zos_data_set, and added trace output to job_submit * removed extra text from functional testing. * put in correct PR number in changelog fragment. * changed trivial to minor_changes, added documentation to dd_scan in job:job_output. * 1043 bug title zos operator is passing wrong value to zoauopercmd (#1044) * corrected kwarg index value from 'wait_arg' to 'wait' Also corrected true/false issue in zoau * Added and updated changelog. * update PR number in changelog fragment * changed test from \$ to \\$ to eliminate warning * added blocking test to make sure minimum wait is reached in zoau>1.2.4.5 * removed the else condition from the blocking test, since it is not needed. * corrected tense grammar in changelog fragment * corrected capitalization of ZOAU in changelog fragment. * updated changelog to point to the backport PR * [v1.8.0] [Backport] [zos_script] remote_tmp for zos_script (#1068) * Enabler/1024/remote_tmp for zos_script (#1060) * Changed tmp_path for Ansible's remote_tmp * Remove tmp_path from module's options * Update module documentation * Remove tmp_path test case * Update zos_script's RST file * Add changelog fragment * Updated module examples * Update changelog fragment * [v1.8.0][zos_job_submit] Removes tmp files left behind by zos_job_submit (#1070) * Ensure proper cleanup for ansiballz * Added proper removal of AnsiballZ * [v1.8.0][zos_copy][backport] File wrongly modified after second copy (#1069) * [zos_copy] Files corrupted after second copy (#1064) * Initial change to replace shutil.copy * Added fix for corrupted directory copies * Added changelog fragment * Modified docstring and fixed copy_tree * Added punctuation * Added copystat * Added set mode for dirs * Update 1064-corruped-second-copy.yml * Updated changelog * [v1.8.0] [backport] [Documentation] 
[zos_tso_command] Add REXX exec example (#1072) * [Documentation] [zos_tso_command] Add REXX exec example (#1065) * Add REXX exec example * Add fragment * Update module documentation * Fix PR link * Reword example task name * Updated REXX example * Update changelog fragment * Update RST file --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * [v1.8.0] [Backport] [zos_copy] mvs to non existent mvs copy verify destination attrs match (#1067) * Add changes * Add fragment * Modify fragment * Modify fragment * [zos_copy] fix for executables copied from local fail with iconv error (#1079) * Added fix for executables copied from local and test * Added changelog * update link in managed_node doc (#1089) * update link in managed_node doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Updated docs regarding managed node Signed-off-by: ddimatos <dimatos@gmail.com> * Doc fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Merge staging-v1.8.0-tasks into staging-v1.8.0 (#1090) * Modified galaxy version * modified meta * Update copyright year * Generated module docs * Created changelog * Removed changelog fragments * Updated changelog and release notes * Fixed newline sequences * Update CHANGELOG.rst * Update CHANGELOG.rst * Corrected release notes * Removed duplicated import zoau --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez 
<andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- CHANGELOG.rst | 21 ++- changelogs/changelog.yaml | 49 +++++++ docs/source/release_notes.rst | 44 ++++-- docs/source/requirements-single.rst | 12 +- galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 4 +- plugins/action/zos_job_submit.py | 20 ++- plugins/module_utils/data_set.py | 2 +- plugins/module_utils/encode.py | 2 +- plugins/module_utils/job.py | 2 +- plugins/module_utils/mvs_cmd.py | 2 +- plugins/module_utils/zoau_version_checker.py | 2 +- plugins/modules/zos_blockinfile.py | 2 +- plugins/modules/zos_copy.py | 20 ++- plugins/modules/zos_job_submit.py | 2 +- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- plugins/modules/zos_tso_command.py | 2 +- .../functional/modules/test_zos_copy_func.py | 131 ++++++++++++++++++ 19 files changed, 274 insertions(+), 49 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2c2815de4..a5883246e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,13 +5,13 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.8.0-beta.1 -============= +v1.8.0 +====== Release Summary --------------- -Release Date: '2023-10-24' +Release Date: '2023-12-08' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review @@ -31,18 +31,31 @@ Minor Changes - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) - zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. 
Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_script - add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). +- zos_submit_job - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) -- zos_copy - add support in zos_copy for text files and data sets containing ASA control characters. (https://github.com/ansible-collections/ibm_zos_core/pull/1028) + +Deprecated Features +------------------- + +- zos_blockinfile - debug is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). Bugfixes -------- - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). +- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. 
Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) - zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) - zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) - zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) +- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). 
Known Issues ------------ diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 2e50559d7..35eeaebb0 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1023,6 +1023,55 @@ releases: - 930-archive-post-beta.yml - v1.7.0-beta.2_summary.yml release_date: '2023-08-21' + 1.8.0: + changes: + bugfixes: + - zos_copy - When copying an executable data set from controller to managed + node, copy operation failed with an encoding error. Fix now avoids encoding + when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). + - zos_copy - When copying an executable data set with aliases and destination + did not exist, destination data set was created with wrong attributes. Fix + now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). + - zos_operator - The module was ignoring the wait time argument. The module + now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_operator_action_query - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_unarchive - When zos_unarchive fails during unpack either with xmit or + terse it does not clean the temporary data sets created. Fix now removes the + temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). 
+ minor_changes: + - zos_script - Add support for remote_tmp from the Ansible configuration to + setup where temporary files will be created, replacing the module option tmp_path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1068). + - zos_job_submit - Previous code did not return output, but still requested + job data from the target system. This changes to honor return_output=false + by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_tso_command - Add example for executing explicitly a REXX script from + a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). + release_summary: 'Release Date: ''2023-12-08'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1041-bug-zos-submit-job-honor-return-output-literally.yml + - 1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml + - 1049-xmit-temporary-data-sets.yml + - 1060-remote_tmp_zos_script.yml + - 1067-mvs_to_non_existent_mvs_copy_verify_destination_attrs_match.yml + - 1069-corrupted-second-copy.yml + - 1072-rexx-exec-tso_command.yml + - 1079-zos-copy-local-executable.yml + - 1089-update-managed_node_doc.yml + - v1.8.0-summary.yml + release_date: '2023-12-08' 1.8.0-beta.1: changes: bugfixes: diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 10150952d..b198d74de 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,8 +6,8 @@ Releases ======== -Version 1.8.0-beta.1 -==================== +Version 1.8.0 +============= New Modules ----------- @@ -22,29 +22,46 @@ Minor Changes - Enhanced test cases to use 
test lines the same length of the record length. - ``zos_copy`` - - Add validation into path joins to detect unauthorized path traversals. + - Add validation into path joins to detect unauthorized path traversals. - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. - - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. - - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. - - add support in zos_copy for text files and data sets containing ASA control characters. + - Includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. + - Introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. + - Add support in zos_copy for text files and data sets containing ASA control characters. - ``zos_fetch`` - Add validation into path joins to detect unauthorized path traversals. -- ``zos_job_submit`` - Change action plugin call from copy to zos_copy. -- ``zos_operator`` - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. 
+- ``zos_job_submit`` + + - Change action plugin call from copy to zos_copy. + - Previous code did not return output, but still requested job data from the target system. This changes to honor `return_output=false` by not querying the job dd segments at all. +- ``zos_operator`` - Changed system to call `wait=true` parameter to zoau call. Requires zoau 1.2.5 or later. - ``zos_operator_action_query`` - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. - ``zos_unarchive`` - - Add validation into path joins to detect unauthorized path traversals. + - Add validation into path joins to detect unauthorized path traversals. - Enhanced test cases to use test lines the same length of the record length. - ``module_utils/template`` - Add validation into path joins to detect unauthorized path traversals. +- ``zos_tso_command`` - Add example for executing explicitly a REXX script from a data set. +- ``zos_script`` - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. Bugfixes -------- -- ``zos_copy`` - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. -- ``zos_job_submit`` - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. -- ``zos_job_submit`` - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. +- ``zos_copy`` + + - Update option to include `LIBRARY` as dest_dataset/suboption value. Documentation updated to reflect this change. + - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when `executable` option is selected. + - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. 
Fix now creates destination data set with the same attributes as the source. + - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. +- ``zos_job_submit`` + + - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. + - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. - ``zos_lineinfile`` - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. -- ``zos_operator`` - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. +- ``zos_operator`` + + - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. + - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. +- ``zos_operator_action_query`` - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. +- ``zos_unarchive`` - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. Known Issues ------------ @@ -55,6 +72,7 @@ Known Issues Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst index 3f0b2b8e0..e31c9636a 100644 --- a/docs/source/requirements-single.rst +++ b/docs/source/requirements-single.rst @@ -12,7 +12,7 @@ Requirements The **IBM z/OS core collection** requires both a **control node** and **managed node** be configured with a minimum set of requirements. 
The control node is often referred to as the **controller** and the -managed node as the **host**. +managed node as the **host** or **target**. Control node ============ @@ -32,17 +32,19 @@ The managed z/OS node is the host that is managed by Ansible, as identified in the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, some dependencies are required to be installed on z/OS such as: +* `z/OS`_ * `z/OS`_ * `z/OS OpenSSH`_ * `z/OS® shell`_ * `IBM Open Enterprise SDK for Python`_ * `IBM Z Open Automation Utilities`_ - .. note:: - Each release of the IBM z/OS core collection depends on specific dependency - versions. For information on the dependencies or the versions, review the - `release notes`_ reference section. +.. note:: + + Each release of the IBM z/OS core collection depends on specific dependency + versions. For information on the dependencies or the versions, review the + `release notes`_ reference section. z/OS shell ---------- diff --git a/galaxy.yml b/galaxy.yml index b83b1014a..f7be530c7 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.8.0-beta.1 +version: 1.8.0 # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 7a68a05bb..e1ee28246 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.8.0-beta.1" +version: "1.8.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.2.3" + - "1.2.4" diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index c28fcec76..12ec5514a 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -57,10 +57,14 @@ def run(self, tmp=None, task_vars=None): source = self._task.args.get("src", None) # Get a temporary file on the 
managed node - tempfile = self._execute_module( - module_name="tempfile", module_args=dict(state="file"), task_vars=task_vars, - ) - dest_path = tempfile.get("path") + dest_path = self._execute_module( + module_name="tempfile", module_args={}, task_vars=task_vars, + ).get("path") + # Calling execute_module from this step with tempfile leaves behind a tmpdir. + # This is called to ensure the proper removal. + tmpdir = self._connection._shell.tmpdir + if tmpdir: + self._remove_tmp_path(tmpdir) result["failed"] = True if source is None: @@ -166,14 +170,6 @@ def run(self, tmp=None, task_vars=None): ) else: result.update(dict(failed=True)) - if rendered_file: - os.remove(rendered_file) - if os.path.isfile(tmp_src): - self._connection.exec_command("rm -rf {0}".format(tmp_src)) - if os.path.isfile(dest_file): - self._connection.exec_command("rm -rf {0}".format(dest_file)) - if os.path.isfile(source_full): - self._connection.exec_command("rm -rf {0}".format(source_full)) else: result.update( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index cae505804..12265e1b4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index c36d0b272..26bb983b3 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 94a65d8c3..1b8cb06f6 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index ec4955ac6..6331a1772 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 41dd35276..12470ef19 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 1751c6472..7a2adf7cc 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index a2e545d8b..5d68d78a5 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2683,7 +2683,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. - if not is_binary and not is_uss: + if not is_binary and not is_uss and not executable: new_src = temp_path or src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). @@ -2761,6 +2761,21 @@ def run_module(module, arg_def): # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type") + elif executable: + """ When executable is selected and dest_exists is false means an executable PDSE was copied to remote, + so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. 
+ Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED + and LIBRARY is not in MVS_PARTITIONED frozen set.""" + dest_ds_type = "PDSE" + + if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): + dest_has_asa_chars = True + elif not dest_exists and asa_text: + dest_has_asa_chars = True + elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: + dest_attributes = datasets.listing(dest_name)[0] + if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_has_asa_chars = True if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): dest_has_asa_chars = True @@ -3052,7 +3067,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- # Copy to PDS/PDSE # --------------------------------------------------------------------- - elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED: + elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY": if not remote_src and not copy_member and os.path.isdir(temp_path): temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) @@ -3272,6 +3287,7 @@ def main(): not module.params.get("encoding") and not module.params.get("remote_src") and not module.params.get("is_binary") + and not module.params.get("executable") ): module.params["encoding"] = { "from": module.params.get("local_charset"), diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 11f0f3ccb..a099bd135 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 
2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 273b0a867..ca6935163 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ccf565626..a035cad33 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 87b157318..28b033a90 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index b42dd9500..42a08890a 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -3428,6 +3428,137 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set(name=dest_lib, state="absent") hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + +@pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases +@pytest.mark.parametrize("is_created", [False, True]) +def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): + + hosts = ansible_zos_module + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + + + try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib + hosts.all.zos_data_set( + name=src_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # copy cobol src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mem)) + # copy cobol2 src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR2), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mem2)) + + # run link-edit for pgm1 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, src_lib, pgm_mem) + assert link_rc == 0 + # run link-edit for pgm2 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem2, src_lib, pgm2_mem, loadlib_alias_mem="ALIAS2") + assert link_rc == 0 + + # execute pgm to 
test pgm1 + validate_loadlib_pgm(hosts, steplib=src_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + + # fetch loadlib into local + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + # fetch loadlib to local + fetch_result = hosts.all.zos_fetch(src=src_lib, dest=tmp_folder.name, is_binary=True) + for res in fetch_result.contacted.values(): + source_path = res.get("dest") + + if not is_created: + # ensure dest data sets absent for this variation of the test case. + hosts.all.zos_data_set(name=dest_lib, state="absent") + else: + # allocate dest loadlib to copy over without an alias. + hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + if not is_created: + # dest data set does not exist, specify it in dest_dataset param. + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src=source_path, + dest="{0}".format(dest_lib), + executable=True, + aliases=False, + dest_data_set={ + 'type': "PDSE", + 'record_format': "U", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "M", + } + ) + else: + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src=source_path, + dest="{0}".format(dest_lib), + executable=True, + aliases=False + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + # number of members + assert len(stdout.splitlines()) == 2 + + finally: + hosts.all.zos_data_set(name=cobol_src_pds, 
state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + + @pytest.mark.pdse @pytest.mark.loadlib @pytest.mark.aliases From 54ebf936068e11605d25242b8008eb814f85618b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 13:22:40 -0600 Subject: [PATCH 253/413] Removed print statement in test --- tests/functional/modules/test_zos_job_submit_func.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 9e2d6e400..44dfdbf01 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -343,7 +343,6 @@ def test_job_submit_LOCAL(ansible_zos_module): results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) for result in results.contacted.values(): - print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True From 00561fbcc5884d4844606d9869ebaacc7d0da5ce Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 18 Dec 2023 11:16:54 -0600 Subject: [PATCH 254/413] fix undefined variable --- plugins/action/zos_job_submit.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 12ec5514a..7906dfa38 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -57,9 +57,10 @@ def run(self, tmp=None, task_vars=None): source = self._task.args.get("src", None) # Get a temporary file on the managed node - dest_path = self._execute_module( + tempfile = self._execute_module( module_name="tempfile", module_args={}, task_vars=task_vars, - ).get("path") + ) + dest_path = tempfile.get("path") # Calling 
execute_module from this step with tempfile leaves behind a tmpdir. # This is called to ensure the proper removal. tmpdir = self._connection._shell.tmpdir From f501463ab0c7674c26b11bd2e76e2276d44f193f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 18 Dec 2023 11:36:22 -0600 Subject: [PATCH 255/413] Add changelog --- changelogs/fragments/1101-fix-undefined-var.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelogs/fragments/1101-fix-undefined-var.yml diff --git a/changelogs/fragments/1101-fix-undefined-var.yml b/changelogs/fragments/1101-fix-undefined-var.yml new file mode 100644 index 000000000..fc0c2be16 --- /dev/null +++ b/changelogs/fragments/1101-fix-undefined-var.yml @@ -0,0 +1,2 @@ +trivial: + - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. (https://github.com/ansible-collections/ibm_zos_core/pull/1101) \ No newline at end of file From f84389fbe0c0021a84402323efc009bc89a3a2ce Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 18 Dec 2023 11:42:42 -0600 Subject: [PATCH 256/413] Modified changelog --- changelogs/fragments/1101-fix-undefined-var.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/changelogs/fragments/1101-fix-undefined-var.yml b/changelogs/fragments/1101-fix-undefined-var.yml index fc0c2be16..1d9eeba3c 100644 --- a/changelogs/fragments/1101-fix-undefined-var.yml +++ b/changelogs/fragments/1101-fix-undefined-var.yml @@ -1,2 +1,3 @@ trivial: - - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. (https://github.com/ansible-collections/ibm_zos_core/pull/1101) \ No newline at end of file + - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. + (https://github.com/ansible-collections/ibm_zos_core/pull/1101). 
\ No newline at end of file From 753e1072b3136c669d7ac1a3c9809579683d9260 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 14:38:39 -0600 Subject: [PATCH 257/413] Change in fetch --- tests/functional/modules/test_zos_fetch_func.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index bc1154de2..7ca003e16 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -161,6 +161,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PS, state="present", size="5m") params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -172,6 +173,7 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From da5b8c7292fd08bb6cfd18ceabcb7a024d2ecc3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 15:04:42 -0600 Subject: [PATCH 258/413] Modify fetch --- .../functional/modules/test_zos_fetch_func.py | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 7ca003e16..b496e2750 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -12,9 +12,7 @@ # limitations under the License. 
from __future__ import absolute_import, division, print_function -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - extract_member_name -) + import os import shutil import stat @@ -79,6 +77,15 @@ /* """ +def extract_member_name(data_set): + start = data_set.find("(") + member = "" + for i in range(start + 1, len(data_set)): + if data_set[i] == ")": + break + member += data_set[i] + return member + def create_and_populate_test_ps_vb(ansible_zos_module): params=dict( name=TEST_PS_VB, @@ -162,6 +169,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_data_set(name=TEST_PS, state="present", size="5m") + hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -199,6 +207,8 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PDS, state="present") + hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PDS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PDS try: @@ -211,6 +221,7 @@ def test_fetch_partitioned_data_set(ansible_zos_module): assert os.path.exists(dest_path) assert os.path.isdir(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): shutil.rmtree(dest_path) @@ -278,6 +289,9 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PDS, state="present") + hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") + hosts.all.zos_lineinfile(path=TEST_PDS_MEMBER, line="unset ZOAU_ROOT", state="present") 
params = dict( src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True ) @@ -293,12 +307,15 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): assert os.path.exists(dest_path) assert os.path.isfile(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PS, state="present") + hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS try: @@ -310,12 +327,15 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): assert result.get("is_binary") is True assert os.path.exists(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PDS, state="present") + hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PDS try: @@ -328,6 +348,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): assert os.path.exists(dest_path) assert os.path.isdir(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): shutil.rmtree(dest_path) @@ -474,6 +495,8 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module ds_name = TEST_PS + hosts.all.zos_data_set(name=TEST_PS, state="present") + hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") dest_path = "/tmp/" + ds_name with open(dest_path, "w") 
as infile: infile.write(DUMMY_DATA) @@ -487,6 +510,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): assert result.get("module_stderr") is None assert checksum(dest_path, hash_func=sha256) != local_checksum finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From 9c7344f406753c5e848a876bc13ee67f992dd5f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 17:30:57 -0600 Subject: [PATCH 259/413] Fix fetch --- .../functional/modules/test_zos_fetch_func.py | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b496e2750..62abd1cf8 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -168,8 +168,9 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PS, state="present", size="5m") - hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") + TEST_PS = "USER.TEST.FETCH" + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -207,8 +208,11 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PDS, state="present") - hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") + TEST_PDS = "USER.TEST.FETCH" + TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' + hosts.all.zos_data_set(name=TEST_PDS, 
state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") + hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) params = dict(src=TEST_PDS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PDS try: @@ -291,7 +295,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_lineinfile(path=TEST_PDS_MEMBER, line="unset ZOAU_ROOT", state="present") + hosts.all.zos_blockinfile(src=TEST_PDS_MEMBER, block=TEST_DATA) params = dict( src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True ) @@ -314,8 +318,9 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") + TEST_PS = "USER.TEST.FETCH" + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS try: @@ -334,8 +339,11 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PDS, state="present") - hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") + TEST_PDS = "USER.TEST.FETCH" + TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") + hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" 
+ TEST_PDS try: @@ -494,9 +502,11 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = "USER.TEST.FETCH" + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") + hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) dest_path = "/tmp/" + ds_name with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) From d608196d1452e3eda01d23f5fe7583a86c9c5be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:29:54 -0600 Subject: [PATCH 260/413] Fix find and continue with fetch --- .../functional/modules/test_zos_fetch_func.py | 1 + .../functional/modules/test_zos_find_func.py | 31 +++++++++++++------ 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 62abd1cf8..c5b1d6a86 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,6 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 345927fe5..0c75ce91e 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -216,20 +216,31 @@ def test_find_data_sets_older_than_age(ansible_zos_module): def 
test_find_data_sets_larger_than_size(ansible_zos_module): hosts = ansible_zos_module - find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='100k') - print(vars(find_res)) - for val in find_res.contacted.values(): - assert len(val.get('data_sets')) == 2 - assert val.get('matched') == 2 + TEST_PS1 = 'TEST.PS.ONE' + TEST_PS2 = 'TEST.PS.TWO' + try: + res = hosts.all.zos_data_set(name=TEST_PS1, state="present", size="5m") + res = hosts.all.zos_data_set(name=TEST_PS2, state="present", size="5m") + find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) == 2 + assert val.get('matched') == 2 + finally: + hosts.all.zos_data_set(name=TEST_PS1, state="absent") + hosts.all.zos_data_set(name=TEST_PS2, state="absent") def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module - find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='-1m') - print(vars(find_res)) - for val in find_res.contacted.values(): - assert len(val.get('data_sets')) == 1 - assert val.get('matched') == 1 + TEST_PS = 'IMSTESTL.MQBATCH.PS' + try: + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k") + find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='-1m') + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) == 1 + assert val.get('matched') == 1 + finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") def test_find_data_sets_in_volume(ansible_zos_module): From f470f2fb81d9a6b206ab132ea54e82b458947b8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:38:21 -0600 Subject: [PATCH 261/413] Catalog a vsam --- tests/functional/modules/test_zos_fetch_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py 
b/tests/functional/modules/test_zos_fetch_func.py index c5b1d6a86..459eb7b86 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,7 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS") + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS", state="cataloged", volumes="000000") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: From c7369866eb7e94756f61d10f7cf36ceaf77a8222 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:43:38 -0600 Subject: [PATCH 262/413] Catalog a vsam --- tests/functional/modules/test_zos_fetch_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 459eb7b86..316f5fbdd 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,7 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS", state="cataloged", volumes="000000") + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, type="KSDS", state="cataloged", volumes="000000") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: From dff9a222419fac105262060a37ef95afce33ffc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:44:06 -0600 Subject: [PATCH 263/413] Catalog a vsam --- 
tests/functional/modules/test_zos_fetch_func.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 316f5fbdd..29a60754b 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -288,6 +288,7 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) finally: + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From 2b6fd8a6354b270eec0c27cadae3c422fcbb3f0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 11:17:17 -0600 Subject: [PATCH 264/413] Add name to vsam --- tests/functional/modules/test_zos_fetch_func.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 29a60754b..3e3c3fc28 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,6 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module + TEST_EMPTY_VSAM = "TEST.VSAM.DATA" hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, type="KSDS", state="cataloged", volumes="000000") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM From e8db7353e6fc8072a845be74fc1af86ad8fbb5a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 11:53:19 -0600 Subject: [PATCH 265/413] Add name to vsam --- .../functional/modules/test_zos_fetch_func.py | 36 +++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git 
a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 3e3c3fc28..b030f2167 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -110,6 +110,36 @@ def delete_test_ps_vb(ansible_zos_module): ansible_zos_module.all.zos_data_set(**params) +def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): + """Creates a new VSAM on the system. + + Arguments: + hosts (object) -- Ansible instance(s) that can call modules. + name (str) -- Name of the VSAM data set. + type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + add_data (bool, optional) -- Whether to add records to the VSAM. + key_length (int, optional) -- Key length (only for KSDS data sets). + key_offset (int, optional) -- Key offset (only for KSDS data sets). + """ + params = dict( + name=name, + type=ds_type, + state="present" + ) + if ds_type == "KSDS": + params["key_length"] = key_length + params["key_offset"] = key_offset + + hosts.all.zos_data_set(**params) + + if add_data: + record_src = "/tmp/zos_copy_vsam_record" + + hosts.all.zos_copy(content=VSAM_RECORDS, dest=record_src) + hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + hosts.all.file(path=record_src, state="absent") + + def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module params = dict(src="/etc/profile", dest="/tmp/", flat=True) @@ -276,8 +306,8 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module - TEST_EMPTY_VSAM = "TEST.VSAM.DATA" - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, type="KSDS", state="cataloged", volumes="000000") + src_ds = "TEST.VSAM.DATA" + create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = 
"/tmp/" + TEST_EMPTY_VSAM try: @@ -289,7 +319,7 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) finally: - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="absent") + hosts.all.zos_data_set(name=src_ds, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From c6c8459865ef879a0cf171b7227c3bcb944f6add Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 12:02:06 -0600 Subject: [PATCH 266/413] Add name to vsam --- tests/functional/modules/test_zos_fetch_func.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b030f2167..a99c1cf18 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -77,6 +77,11 @@ /* """ +VSAM_RECORDS = """00000001A record +00000002A record +00000003A record +""" + def extract_member_name(data_set): start = data_set.find("(") member = "" From 75a1e348e71e54d73b87c7cf6bc0272330348f7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 12:15:40 -0600 Subject: [PATCH 267/413] Add name to vsam --- tests/functional/modules/test_zos_fetch_func.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index a99c1cf18..8f15d0fd8 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -312,9 +312,9 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module src_ds = "TEST.VSAM.DATA" - create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, 
key_length=12, key_offset=0) - params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) - dest_path = "/tmp/" + TEST_EMPTY_VSAM + create_vsam_data_set(hosts, src_ds, "KSDS", key_length=12, key_offset=0) + params = dict(src=src_ds, dest="/tmp/", flat=True) + dest_path = "/tmp/" + src_ds try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): From 65c48c0276ce0c12abc6e701b8f37c00cf43e419 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 16:21:56 -0600 Subject: [PATCH 268/413] Delete and add names to pds with member --- tests/functional/modules/test_zos_fetch_func.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 8f15d0fd8..9593d9307 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -331,6 +331,8 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = "TEST.DATASET.TEST" + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.zos_blockinfile(src=TEST_PDS_MEMBER, block=TEST_DATA) From 5d495d0efb517ce4f198efc502ec82776f707126 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 17:50:21 -0600 Subject: [PATCH 269/413] Remove other modules dependencies --- .../functional/modules/test_zos_fetch_func.py | 36 +++++++------------ 1 file changed, 12 insertions(+), 24 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 8f15d0fd8..ef33a9483 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ 
b/tests/functional/modules/test_zos_fetch_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -30,12 +30,11 @@ """ -TEST_PS = "IMSTESTL.IMS01.DDCHKPT" +TEST_PS = "USER.PRIV.TEST" TEST_PS_VB = "USER.PRIV.PSVB" -TEST_PDS = "IMSTESTL.COMNUC" -TEST_PDS_MEMBER = "IMSTESTL.COMNUC(ATRQUERY)" +TEST_PDS = "USER.PRIV.TESTPDS" +TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(ATRQUERY)" TEST_VSAM = "FETCH.TEST.VS" -TEST_EMPTY_VSAM = "IMSTESTL.LDS01.WADS0" FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" USS_FILE = "/tmp/fetch.data" @@ -100,11 +99,7 @@ def create_and_populate_test_ps_vb(ansible_zos_module): block_size='3190' ) ansible_zos_module.all.zos_data_set(**params) - params = dict( - src=TEST_PS_VB, - block=TEST_DATA - ) - ansible_zos_module.all.zos_blockinfile(**params) + ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS_VB)) def delete_test_ps_vb(ansible_zos_module): @@ -115,7 +110,7 @@ def delete_test_ps_vb(ansible_zos_module): ansible_zos_module.all.zos_data_set(**params) -def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): +def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None): """Creates a new VSAM on the system. 
Arguments: @@ -137,13 +132,6 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.zos_data_set(**params) - if add_data: - record_src = "/tmp/zos_copy_vsam_record" - - hosts.all.zos_copy(content=VSAM_RECORDS, dest=record_src) - hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) - hosts.all.file(path=record_src, state="absent") - def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module @@ -205,7 +193,7 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") - hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -247,7 +235,7 @@ def test_fetch_partitioned_data_set(ansible_zos_module): TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PDS try: @@ -333,7 +321,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_blockinfile(src=TEST_PDS_MEMBER, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict( src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True ) @@ -358,7 +346,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = 
ansible_zos_module TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") - hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS try: @@ -381,7 +369,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PDS try: @@ -544,7 +532,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) dest_path = "/tmp/" + ds_name with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) From 4dbe17d0caa05f758cab1feaf5b47a6ccbb5f7be Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 17:52:26 -0600 Subject: [PATCH 270/413] Remove hardcoded content --- tests/functional/modules/test_zos_fetch_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b1bd2db5d..d60a82011 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -33,7 +33,7 @@ TEST_PS = "USER.PRIV.TEST" TEST_PS_VB = "USER.PRIV.PSVB" TEST_PDS = 
"USER.PRIV.TESTPDS" -TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(ATRQUERY)" +TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(MEM1)" TEST_VSAM = "FETCH.TEST.VS" FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" From 8a0cf6c051c11e4dc90ea1ee342330a91bc2305a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 18:35:15 -0600 Subject: [PATCH 271/413] Modified variable definitions --- tests/functional/modules/test_zos_fetch_func.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index d60a82011..cf942dc09 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -191,7 +191,6 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) @@ -231,8 +230,6 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module - TEST_PDS = "USER.TEST.FETCH" - TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -319,8 +316,6 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module - TEST_PDS = "TEST.DATASET.TEST" - TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") 
hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -346,7 +341,6 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) From f91978b96f81f1fea1e19183d753939490d85190 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 19:10:52 -0600 Subject: [PATCH 272/413] removed variable assignments --- tests/functional/modules/test_zos_fetch_func.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index cf942dc09..c55162cd2 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -361,8 +361,6 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module - TEST_PDS = "USER.TEST.FETCH" - TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -524,7 +522,6 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") From 6fb2d464a8346e9713794b2a5713236d69e49eb1 Mon Sep 17 00:00:00 2001 From: 
Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 19:33:27 -0600 Subject: [PATCH 273/413] Changed IMSTESTL name --- tests/functional/modules/test_zos_find_func.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 0c75ce91e..79df4efac 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -232,10 +232,10 @@ def test_find_data_sets_larger_than_size(ansible_zos_module): def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = 'IMSTESTL.MQBATCH.PS' + TEST_PS = 'USER.FIND.TEST' try: hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k") - find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='-1m') + find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == 1 From 9f3d982ec2bb1bc9a694c7676b68dc5fe153b0eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Thu, 4 Jan 2024 12:04:51 -0600 Subject: [PATCH 274/413] Add fragment --- .../1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml diff --git a/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml b/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml new file mode 100644 index 000000000..7a470d57c --- /dev/null +++ b/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml @@ -0,0 +1,4 @@ +trivial: + - zos_fetch - remove hardcoded datasets and dependencies from test cases. + - zos_find - remove hardcoded datasets and dependencies from test cases. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1158). \ No newline at end of file From dd71ecbc9ee8752d9635155434fefe372ccba954 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 5 Jan 2024 09:39:26 -0600 Subject: [PATCH 275/413] [Bugfix][v1.9.0]short job name sends back a value error with a full stack trace as the msg (#1078) * Add types to better_args_parser * Remove unused re import * Manage spaces * Manage documentation * Manage documentation * Modify regex * Modify regex * Add fragment * Test cases * Change bool to str * Solve documentation * Remove runtime error * Remove re * Test case for owner and job id and add to changelog * Cover all cases --- ...ort_job_name_sends_back_a_value_error.yaml | 11 +++ plugins/module_utils/better_arg_parser.py | 30 +++++++ plugins/modules/zos_job_output.py | 20 +++++ plugins/modules/zos_job_query.py | 83 +++++-------------- .../modules/test_zos_job_output_func.py | 7 ++ .../modules/test_zos_job_query_func.py | 15 +++- 6 files changed, 104 insertions(+), 62 deletions(-) create mode 100644 changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml diff --git a/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml b/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml new file mode 100644 index 000000000..dd9dc98a5 --- /dev/null +++ b/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml @@ -0,0 +1,11 @@ +bugfixes: + - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. + Change now allows the use of a shorter job ID or name, as well as wildcards. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. 
+ Change now allows the use of a shorter job ID or name, as well as wildcards. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + +minor_change: + - zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). \ No newline at end of file diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index 6262d4110..6720f8d10 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -148,6 +148,7 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): "data_set_or_path": self._data_set_or_path_type, "encoding": self._encoding_type, "dd": self._dd_type, + "job_identifier": self._job_identifier, } def handle_arg(self): @@ -743,6 +744,35 @@ def _call_arg_function(self, arg_function, contents): ) ) + # ---------------------------------------------------------------------------- # + # JOB ID AND JOB NAME NAMING RULES # + # ---------------------------------------------------------------------------- # + + def _job_identifier(self, contents, resolve_dependencies): + """Resolver for data_set type arguments. + A text string of up to 8 characters. + The first character must be a letter or a national (#, $, @) character. + Other characters can be letters, numbers, or national (#, $, @) characters. + If the text string contains #, $, or @, enclose the text string in single or double quotation marks. + + Arguments: + contents {str} -- The contents of the argument. + + Raises: + ValueError: When contents is invalid argument type + Returns: + str -- The arguments contents after any necessary operations. 
+ """ + if not fullmatch( + r"(^[a-zA-Z$#@%}]{1}[0-9a-zA-Z$#@%*]{1,7})|(^['\*']{1})", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument "{0}" for type "job_id or job_name".'.format(contents) + ) + return str(contents) + class BetterArgParser(object): def __init__(self, arg_dict): diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index ec4aa0313..40c7d61d0 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -417,6 +417,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( job_output, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) def run_module(): @@ -429,6 +432,23 @@ def run_module(): module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + args_def = dict( + job_id=dict(type="job_identifier", required=False), + job_name=dict(type="job_identifier", required=False), + owner=dict(type="str", required=False), + ddname=dict(type="str", required=False), + ) + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + job_id = module.params.get("job_id") job_name = module.params.get("job_name") owner = module.params.get("owner") diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index cf94fa684..aaa72d9ab 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -258,10 +258,11 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( job_status, ) - +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_text -import re def run_module(): @@ -276,11 +277,29 @@ def 
run_module(): module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + args_def = dict( + job_name=dict(type="job_identifier", required=False), + owner=dict(type="str", required=False), + job_id=dict(type="job_identifier", required=False), + ) + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + if module.check_mode: return result try: - name, id, owner = validate_arguments(module.params) + name = module.params.get("job_name") + id = module.params.get("job_id") + owner = module.params.get("owner") jobs_raw = query_jobs(name, id, owner) if jobs_raw: jobs = parsing_jobs(jobs_raw) @@ -293,64 +312,6 @@ def run_module(): module.exit_json(**result) -# validate_arguments returns a tuple, so we don't have to rebuild the job_name string -def validate_arguments(params): - job_name_in = params.get("job_name") - - job_id = params.get("job_id") - - owner = params.get("owner") - if job_name_in or job_id: - if job_name_in and job_name_in != "*": - job_name_pattern = re.compile(r"^[a-zA-Z$#@%][0-9a-zA-Z$#@%]{0,7}$") - job_name_pattern_with_star = re.compile( - r"^[a-zA-Z$#@%][0-9a-zA-Z$#@%]{0,6}\*$" - ) - test_basic = job_name_pattern.search(job_name_in) - test_star = job_name_pattern_with_star.search(job_name_in) - # logic twist: test_result should be a non-null value from test_basic or test_star - test_result = test_basic - if test_star: - test_result = test_star - - job_name_short = "unused" - # if neither test_basic nor test_star were non-null, check if the string needed to be truncated to the first * - if not test_result: - ix = job_name_in.find("*") - if ix >= 0: - job_name_short = job_name_in[0:ix + 1] - test_result = job_name_pattern.search(job_name_short) - if not test_result: - test_result = job_name_pattern_with_star.search(job_name_short) - - # so 
now, fail if neither test_basic, test_star or test_base from job_name_short found a match - if not test_result: - raise RuntimeError("Unable to locate job name {0}.".format(job_name_in)) - - if job_id: - job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") - test_basic = job_id_pattern.search(job_id) - test_result = None - - if not test_basic: - ix = job_id.find("*") - if ix > 0: - # this differs from job_name, in that we'll drop the star for the search - job_id_short = job_id[0:ix] - - if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: - test_result = job_id_short - - if not test_basic and not test_result: - raise RuntimeError("Failed to validate the job id: " + job_id) - else: - raise RuntimeError("Argument Error:Either job name(s) or job id is required") - if job_id and owner: - raise RuntimeError("Argument Error:job id can not be co-exist with owner") - - return job_name_in, job_id, owner - - def query_jobs(job_name, job_id, owner): jobs = [] diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 11b7cd90d..830828769 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -141,3 +141,10 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): assert job.get("ddnames")[0].get("ddname") == dd_name finally: hosts.all.file(path=TEMP_PATH, state="absent") + + +def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") + for result in results.contacted.values(): + assert result.get("jobs") is not None diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 7128f12a7..b7c412cd4 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ 
b/tests/functional/modules/test_zos_job_query_func.py @@ -28,7 +28,6 @@ def test_zos_job_query_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="*", owner="*") - pprint(vars(results)) for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs") is not None @@ -111,3 +110,17 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") hosts.all.zos_data_set(name=NDATA_SET_NAME, state="absent") + + +def test_zos_job_id_query_short_ids_func(ansible_zos_module): + hosts = ansible_zos_module + qresults = hosts.all.zos_job_query(job_id="STC003") + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None + + +def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): + hosts = ansible_zos_module + qresults = hosts.all.zos_job_query(job_id="STC00*") + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None From dd8db396e78776d3fe60eb85aaed8032becf8d87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 12 Jan 2024 20:51:21 -0600 Subject: [PATCH 276/413] [Enabler][zos_copy]Refactor calls to use new alias and execute options (#1163) * Refactor zos copy and test case fixed --- ...s_to_use_new_alias_and_execute_options.yml | 3 +++ plugins/modules/zos_copy.py | 19 +++---------------- .../functional/modules/test_zos_copy_func.py | 11 ++++++++--- 3 files changed, 14 insertions(+), 19 deletions(-) create mode 100644 changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml diff --git a/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml b/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml new file mode 100644 index 000000000..6cd512427 --- /dev/null +++ 
b/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Change call to ZOAU python API by using a dictionary to arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1163). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 5d68d78a5..9d411f459 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1543,16 +1543,12 @@ def _mvs_copy_to_uss( except FileExistsError: pass - opts = dict() - if self.executable: - opts["options"] = "-IX " - try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: if self.asa_text: response = copy.copy_asa_mvs2uss(src, dest) elif self.executable: - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, alias=True, executable=True) else: response = datasets._copy(src, dest) @@ -1565,7 +1561,7 @@ def _mvs_copy_to_uss( ) else: if self.executable: - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, None, alias=True, executable=True) if response.rc != 0: raise CopyOperationError( @@ -1770,19 +1766,10 @@ def copy_to_member( if self.is_binary or self.asa_text: opts["options"] = "-B" - if self.aliases and not self.executable: - # lower case 'i' for text-based copy (dcp) - opts["options"] = "-i" - - if self.executable: - opts["options"] = "-X" - if self.aliases: - opts["options"] = "-IX" - if self.force_lock: opts["options"] += " -f" - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, alias=self.aliases, executable=self.executable, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response return dict( diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 42a08890a..15e1cd499 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ 
b/tests/functional/modules/test_zos_copy_func.py @@ -3550,8 +3550,13 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): assert v_cp.get("rc") == 0 stdout = v_cp.get("stdout") assert stdout is not None - # number of members - assert len(stdout.splitlines()) == 2 + # verify pgms remain executable + pgm_output_map = { + (dest_lib, pgm_mem, COBOL_PRINT_STR), + (dest_lib, pgm2_mem, COBOL_PRINT_STR2), + } + for steplib, pgm, output in pgm_output_map: + validate_loadlib_pgm(hosts, steplib=steplib, pgm_name=pgm, expected_output_str=output) finally: hosts.all.zos_data_set(name=cobol_src_pds, state="absent") @@ -3689,7 +3694,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): ) # copy USS dir to dest library pds w aliases copy_res_aliases = hosts.all.zos_copy( - src="{0}{1}".format(uss_dir_path, src_lib.upper()), + src="{0}/{1}".format(uss_dir_path, src_lib.upper()), dest="{0}".format(dest_lib_aliases), remote_src=True, executable=True, From df4189bb0c031a97fca96ded365e78f44050752f Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 15 Jan 2024 13:21:29 -0700 Subject: [PATCH 277/413] Update ZOAU list to include v1.3.0 (#1166) --- scripts/mounts.env | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/mounts.env b/scripts/mounts.env index 050887102..7240eaaeb 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -41,7 +41,8 @@ zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ "13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ "14:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ "15:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V125.ZFS "\ -"16:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +"16:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V130.ZFS "\ +"17:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE From 2900ffbbc8a0454b59700f4afeeab7e7508298e7 Mon Sep 17 00:00:00 2001 From: 
Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 09:55:02 -0600 Subject: [PATCH 278/413] [v1.10.0][zos_lineinfile] Removed zos_copy dependency from test cases (#1152) * Changed zos_copy to echo * Modified test case * Added changelog --- ...-lineinfile-remove-zos_copy-dependency.yml | 3 ++ .../modules/test_zos_lineinfile_func.py | 34 +++++++++---------- 2 files changed, 20 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml diff --git a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml new file mode 100644 index 000000000..44015bbd9 --- /dev/null +++ b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - remove zos_copy calls from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1152). diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 754316ff3..94f94cb7a 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -29,14 +29,13 @@ int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; -} -""" +}""" call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH @@ -893,12 +892,12 @@ def test_ds_line_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format( + DEFAULT_DATA_SET_NAME, + MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) @@ -946,12 +945,13 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.file(path="/tmp/disp_shr", state='directory') + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > 
{1}".format( + call_c_jcl.format( + DEFAULT_DATA_SET_NAME, + MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) From f8ea3f02de1da551b847b132825eb761d69f4324 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 09:55:21 -0600 Subject: [PATCH 279/413] [v1.10.0][zos_fetch] Remove zos_copy dependency from test cases (#1165) * Removed zos_copy from zos_fetch test cases * Added trailing char * Initial commit * Updated changelog * removed old fragment * Update 1165-remove-zos-copy-dep-from-zos-fetch.yml --- .../fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml | 3 +++ tests/functional/modules/test_zos_fetch_func.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml diff --git a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml new file mode 100644 index 000000000..9c8593c1a --- /dev/null +++ b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove zos_copy dependency from zos_fetch test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1165). 
diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index c55162cd2..3b4a9c371 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -263,7 +263,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True ) - hosts.all.zos_copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( src=USS_FILE, dest=TEST_VSAM, From 27c41131bc528bc0aa1499c9855743c6af122289 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 09:57:51 -0600 Subject: [PATCH 280/413] [v1.10.0][zos_encode] Remove zos_copy dependency from test cases (#1157) * Removed zos_copy dependency * Added changelog --- .../fragments/1157-remove-zos-copy-from-zos-encode-tests.yml | 3 +++ tests/functional/modules/test_zos_encode_func.py | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml diff --git a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml new file mode 100644 index 000000000..24f2802d5 --- /dev/null +++ b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - Remove zos_copy dependency from zos_encode test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1157). 
diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 5f1e8cfbf..7b7952387 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -898,8 +898,7 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.zos_copy(src=TEMP_JCL_PATH, dest=MVS_PS, remote_src=True) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_FILE_TEXT, MVS_PS)) enc_ds = hosts.all.zos_encode( src=MVS_PS, encoding={ From 94180f534d58cfba251bb6c5ed1f4991c87a18cb Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 10:00:10 -0600 Subject: [PATCH 281/413] [v1.10.0][zos_archive] Remove zos_copy depedency from test cases (#1156) * removed zos_copy from zos_archive tests * Added changelog --- .../1156-zos_archive-remove-zos_copy_dep.yml | 3 +++ tests/functional/modules/test_zos_archive_func.py | 14 ++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml diff --git a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml new file mode 100644 index 000000000..ea8aacee9 --- /dev/null +++ b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - Remove zos_copy dependency from zos_archive test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1156). 
\ No newline at end of file diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 2705a7137..32bedb4fe 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -43,9 +43,9 @@ int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; @@ -857,12 +857,10 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ format_dict["format_options"] = dict(terse_pack="SPACK") # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(ds_to_write), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format(ds_to_write), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") # submit jcl From 066e76bff72e907bd3698217c607b9fa11165653 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 22 Jan 2024 13:51:17 -0600 Subject: [PATCH 282/413] [v1.10.0][zos_blockinfile] Remove zos_copy from test cases (#1167) * Removed zos_copy from test cases * Added changelog * Added file creation --- ...ve-zos-copy-from-zos-blockinfile-tests.yml | 3 +++ .../modules/test_zos_blockinfile_func.py | 26 +++++++++---------- 2 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml diff --git 
a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml new file mode 100644 index 000000000..d7fb725af --- /dev/null +++ b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_blockinfile - Remove zos_copy dependency from zos_blockinfile test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1167). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index d768ad59d..b2e567dc1 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -29,9 +29,9 @@ int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; @@ -1268,12 +1268,11 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.file(path="/tmp/disp_shr/", state="directory") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", 
chdir="/tmp/disp_shr/") time.sleep(5) @@ -1458,12 +1457,11 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.file(path="/tmp/disp_shr/", state="directory") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) From 410925dfa16b6e658e772d7e37cbd8e98af540ea Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 22 Jan 2024 13:51:42 -0600 Subject: [PATCH 283/413] [v1.10.0][zos_lineinfile] Remove zos encode from zos lineinfile (#1179) * Started removing encoding from tests * WIP * WIP * Added conversion * Removed zos_encode * Added changelog * Removed unused code --- ...move-zos_encode-from_zos_lineinfile-tests.yml | 3 +++ .../modules/test_zos_lineinfile_func.py | 16 ++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml diff --git a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml new file mode 100644 index 000000000..a95e1c7e2 --- /dev/null +++ b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - Remove zos_encode 
dependency from zos_lineinfile test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1179). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 94f94cb7a..e415a76e8 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -242,7 +242,7 @@ def remove_ds_environment(ansible_zos_module, DS_NAME): # not supported data set types NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] # The encoding will be only use on a few test -ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] +ENCODING = [ 'ISO8859-1', 'UTF-8'] ######################### # USS test cases @@ -1005,7 +1005,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): def test_uss_encoding(ansible_zos_module, encoding): hosts = ansible_zos_module insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) params["encoding"] = encoding full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = "SIMPLE LINE TO VERIFY" @@ -1013,12 +1013,11 @@ def test_uss_encoding(ansible_zos_module, encoding): hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) hosts.all.file(path=full_path, state="touch") hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) - hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) params["path"] = full_path results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {encoding} {full_path}") for result in results.contacted.values(): assert result.get("stdout") == 
EXPECTED_ENCODING finally: @@ -1032,7 +1031,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts = ansible_zos_module ds_type = dstype insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) params["encoding"] = encoding test_name = "DST13" temp_file = "/tmp/{0}".format(test_name) @@ -1040,7 +1039,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) - hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") hosts.all.zos_data_set(name=ds_name, type=ds_type) if ds_type in ["PDS", "PDSE"]: ds_full_name = ds_name + "(MEM)" @@ -1055,9 +1054,10 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + hosts.all.shell(cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(ds_full_name)) for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING finally: remove_ds_environment(ansible_zos_module, ds_name) \ No newline at end of file From 95f8c23fb6856ac68e204709aef263e9ffa00b62 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 29 Jan 2024 19:37:33 -0600 Subject: [PATCH 284/413] [v1.10.0] [zos_copy] Enhance zos_copy performance when copying multiple PDS members 
(#1183) * [v1.9.0] [zos_copy] Enhancement/764/copy members (#1176) * Changed member copy into a bulk call * Modified copy to members * Cleaned code * Removed hardcoded content from zos_copy tests * Added fix for uss files * Added distinction between uss and mvs * Added alias fix * Moved the copy section to below * Modified for seq test * Added fix for copy dest lock * Added msgs for debugging * Added final changes to member copy * Added copy for when seq to pdse * Add changelog * Added a line into docs * Modified doc * Modified doc * Update changelog * Created a new changelog * Corrected typo --- changelogs/fragments/1183-copy-members.yml | 3 + plugins/modules/zos_copy.py | 61 +++++++++++++------ .../functional/modules/test_zos_copy_func.py | 11 ++-- 3 files changed, 52 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/1183-copy-members.yml diff --git a/changelogs/fragments/1183-copy-members.yml b/changelogs/fragments/1183-copy-members.yml new file mode 100644 index 000000000..b0b0c7896 --- /dev/null +++ b/changelogs/fragments/1183-copy-members.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. + (https://github.com/ansible-collections/ibm_zos_core/pull/1183). diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9d411f459..e07b44a97 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -283,6 +283,8 @@ - If C(src) is a directory and ends with "/", the contents of it will be copied into the root of C(dest). If it doesn't end with "/", the directory itself will be copied. 
+ - If C(src) is a directory or a file, file names will be truncated and/or modified + to ensure a valid name for a data set or member. - If C(src) is a VSAM data set, C(dest) must also be a VSAM. - Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. @@ -1705,33 +1707,56 @@ def copy_to_pdse( existing_members = datasets.list_members(dest) # fyi - this list includes aliases overwritten_members = [] new_members = [] + bulk_src_members = "" + result = dict() for src_member, destination_member in zip(src_members, dest_members): if destination_member in existing_members: overwritten_members.append(destination_member) else: new_members.append(destination_member) - + bulk_src_members += "{0} ".format(src_member) + + # Copy section + if src_ds_type == "USS" or self.asa_text or len(src_members) == 1: + """ + USS -> MVS : Was kept on member by member basis bc file names longer than 8 + characters will throw an error when copying to a PDS, because of the member name + character limit. + MVS -> MVS (asa only): This has to be copied on member by member basis bc OPUT + does not allow for bulk member copy or entire PDS to PDS copy. + """ + for src_member, destination_member in zip(src_members, dest_members): + result = self.copy_to_member( + src_member, + "{0}({1})".format(dest, destination_member), + src_ds_type + ) + else: + """ + MVS -> MVS + Copies a list of members into a PDS, using this list of members greatly + enhances performance of datasets_copy. 
+ """ result = self.copy_to_member( - src_member, - "{0}({1})".format(dest, destination_member), + bulk_src_members, + dest, src_ds_type ) - if result["rc"] != 0: - msg = "Unable to copy source {0} to data set member {1}({2})".format( - new_src, - dest, - destination_member - ) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"], - overwritten_members=overwritten_members, - new_members=new_members - ) + if result["rc"] != 0: + msg = "Unable to copy source {0} to {1}.".format( + new_src, + dest + ) + raise CopyOperationError( + msg=msg, + rc=result["rc"], + stdout=result["out"], + stderr=result["err"], + overwritten_members=overwritten_members, + new_members=new_members + ) def copy_to_member( self, diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 15e1cd499..b6fee6689 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -1958,7 +1958,7 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type", ["PDS", "PDSE", "SEQ"]) +@pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"]) def test_copy_dest_lock(ansible_zos_module, ds_type): DATASET_1 = "USER.PRIVATE.TESTDS" DATASET_2 = "ADMI.PRIVATE.TESTDS" @@ -1971,8 +1971,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): dest_data_set = DATASET_2 try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DATASET_1, state="present", type="pdse", replace=True) - hosts.all.zos_data_set(name=DATASET_2, state="present", type="pdse", replace=True) + hosts.all.zos_data_set(name=DATASET_1, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=DATASET_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) @@ -4323,9 +4323,10 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): hosts = ansible_zos_module source = "USER.TEST.FUNCTEST.SRC" dest = "USER.TEST.FUNCTEST.DEST" - + source_member = "USER.TEST.FUNCTEST.SRC(MEMBER)" try: hosts.all.zos_data_set(name=source, type=src_type, state='present') + hosts.all.zos_data_set(name=source_member, type="member", state='present') copy_res = hosts.all.zos_copy( src=source, dest=dest, From ad72db6add10730163037ed7b5f8e7e12695e3d8 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Tue, 30 Jan 2024 10:01:26 -0800 Subject: [PATCH 285/413] zos_operator - ZOAU 1.3.0 migration (#1181) * adjust value of timeout param which the module measures in seconds to centiseconds for zoau v1.3.x+ migration Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename timeout param in helper functions to distinguish unit of measurement - 
timeout_s and timeout_c Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright year Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1181-zoau-migration-zos_operator.yml | 4 ++++ plugins/modules/zos_operator.py | 12 ++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1181-zoau-migration-zos_operator.yml diff --git a/changelogs/fragments/1181-zoau-migration-zos_operator.yml b/changelogs/fragments/1181-zoau-migration-zos_operator.yml new file mode 100644 index 000000000..7c107de88 --- /dev/null +++ b/changelogs/fragments/1181-zoau-migration-zos_operator.yml @@ -0,0 +1,4 @@ +trivial: + - zos_operator - Update internal functions to account for the change to the + unit of measurement of `timeout` now in centiseconds. + (https://github.com/ansible-collections/ibm_zos_core/pull/1181). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index ca6935163..6281c5cd6 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -172,9 +172,13 @@ ZOAU_API_VERSION = "1.2.0" -def execute_command(operator_cmd, timeout=1, *args, **kwargs): +def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + + # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: + timeout_c = 100 * timeout_s + start = timer() - response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) + response = opercmd.execute(operator_cmd, timeout=timeout_c, *args, **kwargs) end = timer() rc = response.rc stdout = response.stdout_response @@ -293,7 +297,7 @@ def run_operator_command(params): kwargs.update({"wait": True}) args = [] - rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) if rc > 0: message = "\nOut: {0}\nErr: {1}\nRan: {2}".format(stdout, stderr, cmdtxt) From 4caa946420ad64690407ebf768cee963100e6300 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 30 Jan 2024 11:03:09 -0700 Subject: [PATCH 286/413] [v1.10.0] [Enabler] [job] ZOAU v1.3.0 migration for job.py (#1169) * Update calls to jobs * Update copyright years * Add changelog fragment * Rename changelog fragment * Fix references to None types * Update 1169-util-job-zoau-migration.yml --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1169-util-job-zoau-migration.yml | 3 + plugins/module_utils/job.py | 90 +++++++++++++------ 2 files changed, 66 insertions(+), 27 deletions(-) create mode 100644 changelogs/fragments/1169-util-job-zoau-migration.yml diff --git a/changelogs/fragments/1169-util-job-zoau-migration.yml b/changelogs/fragments/1169-util-job-zoau-migration.yml new file mode 100644 index 000000000..568aa9a4e --- /dev/null +++ b/changelogs/fragments/1169-util-job-zoau-migration.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/job.py - migrate code to use ZOAU v1.3.0. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1169). diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1b8cb06f6..3d7d80d68 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -71,16 +71,27 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru ) parser = BetterArgParser(arg_defs) - parsed_args = parser.parse_args( - {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name} - ) + parsed_args = parser.parse_args({ + "job_id": job_id, + "owner": owner, + "job_name": job_name, + "dd_name": dd_name + }) job_id = parsed_args.get("job_id") or "*" job_name = parsed_args.get("job_name") or "*" owner = parsed_args.get("owner") or "*" dd_name = parsed_args.get("dd_name") or "" - job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, duration=duration, dd_scan=dd_scan, timeout=timeout, start_time=start_time) + job_detail = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_name=dd_name, + duration=duration, + dd_scan=dd_scan, + timeout=timeout, + start_time=start_time + ) # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): # current_time = timer() @@ -92,13 +103,22 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru job_id = "" if job_id == "*" else job_id owner = "" if owner == "*" else owner job_name = "" if job_name == "*" else job_name - job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, dd_scan=dd_scan, duration=duration, timeout=timeout, start_time=start_time) + + job_detail = _get_job_status( + job_id=job_id, + 
owner=owner, + job_name=job_name, + dd_name=dd_name, + dd_scan=dd_scan, + duration=duration, + timeout=timeout, + start_time=start_time + ) return job_detail def _job_not_found(job_id, owner, job_name, dd_name): - # Note that the text in the msg_txt is used in test cases thus sensitive to change + # Note that the text in the msg_txt is used in test cases and thus sensitive to change jobs = [] if job_id != '*' and job_name != '*': job_not_found_msg = "{0} with the job_id {1}".format(job_name.upper(), job_id.upper()) @@ -170,13 +190,24 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): job_name = parsed_args.get("job_name") or "*" owner = parsed_args.get("owner") or "*" - job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) + job_status_result = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_scan=False + ) if len(job_status_result) == 0: job_id = "" if job_id == "*" else job_id job_name = "" if job_name == "*" else job_name owner = "" if owner == "*" else owner - job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) + + job_status_result = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_scan=False + ) return job_status_result @@ -223,16 +254,13 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # This will also help maintain compatibility with 1.2.3 final_entries = [] - kwargs = { - "job_id": job_id_temp, - } - entries = jobs.listing(**kwargs) + entries = jobs.fetch_multiple(job_id=job_id_temp) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = jobs.listing(**kwargs) + entries = jobs.fetch_multiple(job_id=job_id_temp) if entries: for entry in entries: @@ -243,30 +271,35 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T if not 
fnmatch.fnmatch(entry.name, job_name): continue if job_id_temp is not None: - if not fnmatch.fnmatch(entry.id, job_id): + if not fnmatch.fnmatch(entry.job_id, job_id): continue job = {} - job["job_id"] = entry.id + job["job_id"] = entry.job_id job["job_name"] = entry.name job["subsystem"] = "" job["system"] = "" job["owner"] = entry.owner job["ret_code"] = {} - job["ret_code"]["msg"] = entry.status + " " + entry.rc - job["ret_code"]["msg_code"] = entry.rc + job["ret_code"]["msg"] = "{0} {1}".format(entry.status, entry.return_code) + job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None - if len(entry.rc) > 0: - if entry.rc.isdigit(): - job["ret_code"]["code"] = int(entry.rc) + if entry.return_code and len(entry.return_code) > 0: + if entry.return_code.isdigit(): + job["ret_code"]["code"] = int(entry.return_code) job["ret_code"]["msg_text"] = entry.status # this section only works on zoau 1.2.3/+ vvv + # Beginning in ZOAU v1.3.0, the Job class changes svc_class to + # service_class. 
+ if zoau_version_checker.is_zoau_version_higher_than("1.2.5"): + job["service_class"] = entry.service_class + elif zoau_version_checker.is_zoau_version_higher_than("1.2.2"): + job["svc_class"] = entry.svc_class if zoau_version_checker.is_zoau_version_higher_than("1.2.2"): job["job_class"] = entry.job_class - job["svc_class"] = entry.svc_class job["priority"] = entry.priority job["asid"] = entry.asid job["creation_date"] = str(entry.creation_datetime)[0:10] @@ -284,12 +317,12 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = jobs.list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.job_id) while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = jobs.list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.job_id) job["duration"] = duration @@ -335,7 +368,10 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T if "stepname" in single_dd: if "dataset" in single_dd: tmpcont = jobs.read_output( - entry.id, single_dd["stepname"], single_dd["dataset"]) + entry.job_id, + single_dd["stepname"], + single_dd["dataset"] + ) dd["content"] = tmpcont.split("\n") job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) From 433cfc0bf85dddd9afcb9c0f2729d0613366b13d Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 30 Jan 2024 12:05:19 -0600 Subject: [PATCH 287/413] [v1.10.0] [module_utils/copy.py] Implement ZOAU 1.3 migration changes into module_utils/copy.py (#1187) * Replaced zoau datasets import * Updated changelog * Updated changelog * Modified copyright year * Update 1187-migrate-module-utils-copy.yml --- .../fragments/1187-migrate-module-utils-copy.yml | 3 +++ plugins/module_utils/copy.py | 12 ++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 
changelogs/fragments/1187-migrate-module-utils-copy.yml diff --git a/changelogs/fragments/1187-migrate-module-utils-copy.yml b/changelogs/fragments/1187-migrate-module-utils-copy.yml new file mode 100644 index 000000000..26157f9fc --- /dev/null +++ b/changelogs/fragments/1187-migrate-module-utils-copy.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/copy.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1187). diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index ac9e74758..71b47c974 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2023 +# Copyright (c) IBM Corporation 2019-2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -283,7 +283,15 @@ def copy_asa_pds2uss(src, dest): str -- The stderr after the copy command executed successfully """ from os import path - from zoautil_py import datasets + import traceback + from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError, + ) + + try: + from zoautil_py import datasets + except Exception: + datasets = ZOAUImportError(traceback.format_exc()) src = _validate_data_set_name(src) dest = _validate_path(dest) From 032d0d83bbd80b51d51cb880fb0914524b906211 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 30 Jan 2024 12:06:25 -0600 Subject: [PATCH 288/413] [v1.10.0] [module_utils/dd_statement.py] Implement ZOAU 1.3 migration changes into module_utils/dd_statement.py (#1190) * Migrated module_utils/dd_statement * Added changelog * Update 1190-migrate-module_utils-dd_statement.yml --- .../1190-migrate-module_utils-dd_statement.yml | 3 +++ plugins/module_utils/dd_statement.py | 12 ++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 
changelogs/fragments/1190-migrate-module_utils-dd_statement.yml diff --git a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml new file mode 100644 index 000000000..4bb3a582d --- /dev/null +++ b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/dd_statement.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1190). diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index d35f9e44e..57b7bcdad 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -10,11 +10,11 @@ # limitations under the License. 
from __future__ import absolute_import, division, print_function - +import traceback __metaclass__ = type from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet @@ -22,7 +22,7 @@ try: from zoautil_py import datasets except ImportError: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) space_units = {"b": "", "kb": "k", "mb": "m", "gb": "g"} @@ -651,8 +651,8 @@ def __init__(self, tmphlq=None): if tmphlq: hlq = tmphlq else: - hlq = datasets.hlq() - name = datasets.tmp_name(hlq) + hlq = datasets.get_hlq() + name = datasets.tmp_name(high_level_qualifier=hlq) super().__init__(name) def __del__(self): From 7ef0c9e6ca142a6264348c96a7eb68f8b9b8a965 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 31 Jan 2024 13:52:39 -0600 Subject: [PATCH 289/413] [v1.10.0] [module_utils/backup.py] Implement ZOAU 1.3 migration changes into module_utils/backup.py (#1188) * Changed datasets call to zoau 1.3 * Updated changelog * Corrected changelog location and tag * Corrected copyright dates * Removed wrong exception raise * Removed unused var --- .../1188-migrate-module_utils-backup.yml | 3 ++ plugins/module_utils/backup.py | 31 +++++++++++-------- 2 files changed, 21 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1188-migrate-module_utils-backup.yml diff --git a/changelogs/fragments/1188-migrate-module_utils-backup.yml b/changelogs/fragments/1188-migrate-module_utils-backup.yml new file mode 100644 index 000000000..65945d06b --- /dev/null +++ b/changelogs/fragments/1188-migrate-module_utils-backup.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/backup.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1188). 
\ No newline at end of file diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 28339d842..46f8669c5 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -22,8 +22,9 @@ import time from shutil import copy2, copytree, rmtree +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, @@ -39,9 +40,10 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import iebcopy try: - from zoautil_py import datasets + from zoautil_py import datasets, exceptions except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + exceptions = ZOAUImportError(traceback.format_exc()) if PY3: from shlex import quote else: @@ -76,29 +78,32 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name()) bk_dsn = _validate_data_set_name(bk_dsn).upper() - response = datasets._copy(dsn, bk_dsn) - if response.rc != 0: + try: + datasets.copy(dsn, bk_dsn) + except exceptions.ZOAUException as copy_exception: raise BackupError( "Unable to backup {0} to {1}".format(dsn, bk_dsn), - rc=response.rc, - stdout=response.stdout_response, - stderr=response.stderr_response + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response ) else: if not bk_dsn: if tmphlq: hlq = tmphlq else: - hlq = datasets.hlq() - bk_dsn = datasets.tmp_name(hlq) + hlq = datasets.get_hlq() + bk_dsn = 
datasets.tmp_name(high_level_qualifier=hlq) bk_dsn = _validate_data_set_name(bk_dsn).upper() # In case the backup ds is a member we trust that the PDS attributes are ok to fit the src content. # This should not delete a PDS just to create a backup member. # Otherwise, we allocate the appropiate space for the backup ds based on src. if is_member(bk_dsn): - cp_response = datasets._copy(dsn, bk_dsn) - cp_rc = cp_response.rc + try: + cp_rc = datasets.copy(dsn, bk_dsn) + except exceptions.ZOAUException as copy_exception: + cp_rc = copy_exception.response.rc else: cp_rc = _copy_ds(dsn, bk_dsn) From 79a1ce1a00fba1495f80b5ccd5db7f5e50825cac Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 31 Jan 2024 14:09:09 -0600 Subject: [PATCH 290/413] [v1.10.0][module_utils/data_set.py] Implement ZOAU 1.3 migration changes into module_utils/data_set.py (#1182) * Made changes to module utils * Added traceback import * Updated changelog * Changed year * Updated zoau import * Update 1182-migrate-module-utils-data-set.yml * Changed build zoau args to dataset type --- .../1182-migrate-module-utils-data-set.yml | 3 ++ plugins/module_utils/data_set.py | 45 ++++++++++++------- 2 files changed, 31 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1182-migrate-module-utils-data-set.yml diff --git a/changelogs/fragments/1182-migrate-module-utils-data-set.yml b/changelogs/fragments/1182-migrate-module-utils-data-set.yml new file mode 100644 index 000000000..857327254 --- /dev/null +++ b/changelogs/fragments/1182-migrate-module-utils-data-set.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/data_set.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1182). 
diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 12265e1b4..8b02d77f4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -15,6 +15,7 @@ import re import tempfile +import traceback from os import path, walk from string import ascii_uppercase, digits from random import sample @@ -24,8 +25,8 @@ AnsibleModuleHelper, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, MissingImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( @@ -39,9 +40,10 @@ vtoc = MissingImport("vtoc") try: - from zoautil_py import datasets + from zoautil_py import datasets, exceptions except ImportError: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + exceptions = ZOAUImportError(traceback.format_exc()) class DataSet(object): @@ -316,7 +318,11 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo # Now adding special parameters for sequential and partitioned # data sets. if model_type not in DataSet.MVS_VSAM: - block_size = datasets.listing(model)[0].block_size + try: + data_set = datasets.list_datasets(model)[0] + except IndexError: + raise AttributeError("Could not retrieve model data set block size.") + block_size = data_set.block_size alloc_cmd = """{0} - BLKSIZE({1})""".format(alloc_cmd, block_size) @@ -500,7 +506,7 @@ def data_set_volume(name): DatasetVolumeError: When the function is unable to parse the value of VOLSER. 
""" - data_set_information = datasets.listing(name) + data_set_information = datasets.list_datasets(name) if len(data_set_information) > 0: return data_set_information[0].volume @@ -535,10 +541,10 @@ def data_set_type(name, volume=None): if not DataSet.data_set_exists(name, volume): return None - data_sets_found = datasets.listing(name) + data_sets_found = datasets.list_datasets(name) # Using the DSORG property when it's a sequential or partitioned - # dataset. VSAMs are not found by datasets.listing. + # dataset. VSAMs are not found by datasets.list_datasets. if len(data_sets_found) > 0: return data_sets_found[0].dsorg @@ -912,7 +918,7 @@ def _build_zoau_args(**kwargs): volumes = ",".join(volumes) if volumes else None kwargs["space_primary"] = primary kwargs["space_secondary"] = secondary - kwargs["type"] = type + kwargs["dataset_type"] = type kwargs["volumes"] = volumes kwargs.pop("space_type", None) renamed_args = {} @@ -946,7 +952,7 @@ def create( force=None, ): """A wrapper around zoautil_py - Dataset.create() to raise exceptions on failure. + datasets.create() to raise exceptions on failure. Reasonable default arguments will be set by ZOAU when necessary. 
Args: @@ -1007,17 +1013,22 @@ def create( """ original_args = locals() formatted_args = DataSet._build_zoau_args(**original_args) - response = datasets._create(**formatted_args) - if response.rc > 0: + try: + datasets.create(**formatted_args) + except (exceptions.ZOAUException, exceptions.DatasetVerificationError) as create_exception: raise DatasetCreateError( - name, response.rc, response.stdout_response + response.stderr_response + name, + create_exception.response.rc, + create_exception.response.stdout_response + create_exception.response.stderr_response ) - return response.rc + # With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned + # response.rc now we just return 0 if nothing failed + return 0 @staticmethod def delete(name): """A wrapper around zoautil_py - Dataset.delete() to raise exceptions on failure. + datasets.delete() to raise exceptions on failure. Arguments: name (str) -- The name of the data set to delete. @@ -1056,7 +1067,7 @@ def create_member(name): @staticmethod def delete_member(name, force=False): """A wrapper around zoautil_py - Dataset.delete_members() to raise exceptions on failure. + datasets.delete_members() to raise exceptions on failure. Arguments: name (str) -- The name of the data set, including member name, to delete. @@ -1306,7 +1317,7 @@ def temp_name(hlq=""): str: The temporary data set name. 
""" if not hlq: - hlq = datasets.hlq() + hlq = datasets.get_hlq() temp_name = datasets.tmp_name(hlq) return temp_name From 0d889000ede8d6620ecc0be4c56b809a50db5275 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 31 Jan 2024 13:09:45 -0700 Subject: [PATCH 291/413] [Enabler] [zos_copy] Remove zos_fetch call in loadlib test (#1184) * Remove zos_fetch call in loadlib test * Add changelog fragment * Change use of cp to dcp * Add delay to last zos_copy call * Change dcp call * Disable cleanup temporarily * Change tmp dir used * Change scp for sftp * Turn cleanup on once again * Removed print statement --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...184-remove-zos-fetch-dep-from-zos-copy.yml | 3 ++ .../functional/modules/test_zos_copy_func.py | 34 +++++++++++++++---- 2 files changed, 31 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml diff --git a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml new file mode 100644 index 000000000..9085743d9 --- /dev/null +++ b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Remove zos_fetch dependency from zos_copy test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1184). 
diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index b6fee6689..2cc11c9dd 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -3434,7 +3434,6 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): @pytest.mark.aliases @pytest.mark.parametrize("is_created", [False, True]) def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): - hosts = ansible_zos_module cobol_src_pds = "USER.COBOL.SRC" @@ -3444,6 +3443,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): dest_lib = "USER.LOAD.DEST" pgm_mem = "HELLO" pgm2_mem = "HELLO2" + uss_location = "/tmp/loadlib" try: @@ -3487,11 +3487,32 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): validate_loadlib_pgm(hosts, steplib=src_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) # fetch loadlib into local - tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") - # fetch loadlib to local - fetch_result = hosts.all.zos_fetch(src=src_lib, dest=tmp_folder.name, is_binary=True) - for res in fetch_result.contacted.values(): - source_path = res.get("dest") + # Copying the loadlib to USS. + hosts.all.file(name=uss_location, state='directory') + hosts.all.shell( + cmd=f"dcp -X -I \"{src_lib}\" {uss_location}", + executable=SHELL_EXECUTABLE + ) + + # Copying the remote loadlibs in USS to a local dir. + # This section ONLY handles ONE host, so if we ever use multiple hosts to + # test, we will need to update this code. + remote_user = hosts["options"]["user"] + # Removing a trailing comma because the framework saves the hosts list as a + # string instead of a list. 
+ remote_host = hosts["options"]["inventory"].replace(",", "") + + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + cmd = [ + "sftp", + "-r", + f"{remote_user}@{remote_host}:{uss_location}", + f"{tmp_folder.name}" + ] + with subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE) as sftp_proc: + result = sftp_proc.stdout.read() + + source_path = os.path.join(tmp_folder.name, os.path.basename(uss_location)) if not is_created: # ensure dest data sets absent for this variation of the test case. @@ -3562,6 +3583,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set(name=cobol_src_pds, state="absent") hosts.all.zos_data_set(name=src_lib, state="absent") hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.file(name=uss_location, state="absent") @pytest.mark.pdse From 2109a5cdc42f25bb9a39b53d5f2216bab00e714e Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 31 Jan 2024 14:10:16 -0600 Subject: [PATCH 292/413] [v1.10.0] [module_utils/encode.py] Implement ZOAU 1.3 migration changes into module_utils/encode.py (#1189) * Updated module_utils encode * Updated changelog * Update 1189-migrate-module_utils-encode.yml * Modified datasets.create call * Changed datasets.create call --- .../1189-migrate-module_utils-encode.yml | 3 +++ plugins/module_utils/encode.py | 24 +++++++++---------- 2 files changed, 15 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1189-migrate-module_utils-encode.yml diff --git a/changelogs/fragments/1189-migrate-module_utils-encode.yml b/changelogs/fragments/1189-migrate-module_utils-encode.yml new file mode 100644 index 000000000..d7f471847 --- /dev/null +++ b/changelogs/fragments/1189-migrate-module_utils-encode.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/encode.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1189). 
diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 26bb983b3..195802583 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -24,9 +24,10 @@ import os import re import locale +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, @@ -39,7 +40,7 @@ try: from zoautil_py import datasets except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) if PY3: @@ -188,24 +189,23 @@ def temp_data_set(self, reclen, space_u): str -- Name of the allocated data set Raises: - OSError: When any exception is raised during the data set allocation + ZOAUException: When any exception is raised during the data set allocation. + DatasetVerificationError: When the data set creation could not be verified. 
""" size = str(space_u * 2) + "K" if self.tmphlq: hlq = self.tmphlq else: - hlq = datasets.hlq() - temp_ps = datasets.tmp_name(hlq) - response = datasets._create( + hlq = datasets.get_hlq() + temp_ps = datasets.tmp_name(high_level_qualifier=hlq) + temporary_data_set = datasets.create( name=temp_ps, - type="SEQ", + dataset_type="SEQ", primary_space=size, record_format="VB", record_length=reclen, ) - if response.rc: - raise OSError("Failed when allocating temporary sequential data set!") - return temp_ps + return temporary_data_set.name def get_codeset(self): """Get the list of supported encodings from the USS command 'iconv -l' @@ -406,7 +406,7 @@ def mvs_convert_encoding( rc, out, err = copy.copy_pds2uss(src, temp_src) if src_type == "VSAM": reclen, space_u = self.listdsi_data_set(src.upper()) - # RDW takes the first 4 bytes or records in the VB format, hence we need to add an extra buffer to the vsam max recl. + # RDW takes the first 4 bytes in the VB format, hence we need to add an extra buffer to the vsam max recl. 
reclen += 4 temp_ps = self.temp_data_set(reclen, space_u) rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps) From f81108d5366f79667070bb5fbd604f2b83db4f77 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Thu, 1 Feb 2024 11:17:31 -0800 Subject: [PATCH 293/413] [v1.10.0] [zos_gather_facts] ZOAU 1.3 migration - zos_gather_facts (#1196) * update module to leverage zoau python api for zinfo Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address pep8 issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update catch-all error message Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../1196-zoau-migration-zos_gather_facts.yml | 4 + plugins/modules/zos_gather_facts.py | 87 ++++++++----------- .../modules/test_zos_gather_facts_func.py | 3 +- tests/unit/test_zos_gather_facts.py | 29 ++++--- 4 files changed, 56 insertions(+), 67 deletions(-) create mode 100644 changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml diff --git a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml new file mode 100644 index 000000000..03f39b535 --- /dev/null +++ b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml @@ -0,0 +1,4 @@ +trivial: + - zos_gather_facts - Update module internally to leverage ZOAU python API + for zinfo. + (https://github.com/ansible-collections/ibm_zos_core/pull/1196). 
\ No newline at end of file diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index b7aeb7ee4..2ea7b0baf 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -108,30 +108,38 @@ """ from fnmatch import fnmatch -import json +import traceback from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( zoau_version_checker ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError, +) + +try: + from zoautil_py import zsystem +except ImportError: + zsystem = ZOAUImportError(traceback.format_exc()) + -def zinfo_cmd_string_builder(gather_subset): - """Builds a command string for 'zinfo' based off the gather_subset list. +def zinfo_facts_list_builder(gather_subset): + """Builds a list of strings to pass into 'zinfo' based off the + gather_subset list. Arguments: gather_subset {list} -- A list of subsets to pass in. Returns: - [str] -- A string that contains a command line argument for calling - zinfo with the appropriate options. + [list[str]] -- A list of strings that contains sanitized subsets. [None] -- An invalid value was received for the subsets. """ if gather_subset is None or 'all' in gather_subset: - return "zinfo -j -a" + return ["all"] # base value - zinfo_arg_string = "zinfo -j" + subsets_list = [] - # build full string for subset in gather_subset: # remove leading/trailing spaces subset = subset.strip() @@ -141,9 +149,9 @@ def zinfo_cmd_string_builder(gather_subset): # sanitize subset against malicious (probably alphanumeric only?) 
if not subset.isalnum(): return None - zinfo_arg_string += " -t " + subset + subsets_list.append(subset) - return zinfo_arg_string + return subsets_list def flatten_zinfo_json(zinfo_dict): @@ -214,59 +222,36 @@ def run_module(): if module.check_mode: module.exit_json(**result) - if not zoau_version_checker.is_zoau_version_higher_than("1.2.1"): + if not zoau_version_checker.is_zoau_version_higher_than("1.3.0"): module.fail_json( - ("The zos_gather_facts module requires ZOAU >= 1.2.1. Please " + ("The zos_gather_facts module requires ZOAU >= 1.3.0. Please " "upgrade the ZOAU version on the target node.") ) gather_subset = module.params['gather_subset'] - # build out zinfo command with correct options + # build out list of strings to pass to zinfo python api. # call this whether or not gather_subsets list is empty/valid/etc - # rely on the function to report back errors. Note the function only + # rely on the helper function to report back errors. Note the function only # returns None if there's malicious or improperly formatted subsets. - # Invalid subsets are caught when the actual zinfo command is run. - cmd = zinfo_cmd_string_builder(gather_subset) - if not cmd: + # Invalid subsets are caught when the actual zinfo function is run. + facts_list = zinfo_facts_list_builder(gather_subset) + if not facts_list: module.fail_json(msg="An invalid subset was passed to Ansible.") - rc, fcinfo_out, err = module.run_command(cmd, encoding=None) - - decode_str = fcinfo_out.decode('utf-8') - - # We DO NOT return a partial list. Instead we FAIL FAST since we are - # targeting automation -- quiet but well-intended error messages may easily - # be skipped - if rc != 0: - # there are 3 known error messages in zinfo, if neither gets - # triggered then we send out this generic zinfo error message. - err_msg = ('An exception has occurred in Z Open Automation Utilities ' - '(ZOAU) utility \'zinfo\'. 
See \'zinfo_err_msg\' for ' - 'additional details.') - # triggered by invalid optarg eg "zinfo -q" - if 'BGYSC5201E' in err.decode('utf-8'): - err_msg = ('Invalid call to zinfo. See \'zinfo_err_msg\' for ' - 'additional details.') - # triggered when optarg does not get expected arg eg "zinfo -t" - elif 'BGYSC5202E' in err.decode('utf-8'): - err_msg = ('Invalid call to zinfo. Possibly missing a valid subset' - ' See \'zinfo_err_msg\' for additional details.') - # triggered by illegal subset eg "zinfo -t abc" - elif 'BGYSC5203E' in err.decode('utf-8'): - err_msg = ('An invalid subset was detected. See \'zinfo_err_msg\' ' - 'for additional details.') - - module.fail_json(msg=err_msg, zinfo_err_msg=err) - zinfo_dict = {} # to track parsed zinfo facts. try: - zinfo_dict = json.loads(decode_str) - except json.JSONDecodeError: - # tell user something else for this error? This error is thrown when - # Python doesn't like the json string it parsed from zinfo. - module.fail_json(msg="Unsupported JSON format for the output.") + zinfo_dict = zsystem.zinfo(json=True, facts=facts_list) + except ValueError: + err_msg = 'An invalid subset was detected.' + module.fail_json(msg=err_msg) + except Exception as e: + err_msg = ( + 'An exception has occurred. Unable to gather facts. ' + 'See stderr for more details.' 
+ ) + module.fail_json(msg=err_msg, stderr=str(e)) # remove zinfo subsets from parsed zinfo result, flatten by one level flattened_d = flatten_zinfo_json(zinfo_dict) diff --git a/tests/functional/modules/test_zos_gather_facts_func.py b/tests/functional/modules/test_zos_gather_facts_func.py index 1903f0cbd..f2861c596 100644 --- a/tests/functional/modules/test_zos_gather_facts_func.py +++ b/tests/functional/modules/test_zos_gather_facts_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 +# Copyright (c) IBM Corporation 2022 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -120,7 +120,6 @@ def test_with_gather_subset_bad(ansible_zos_module, gather_subset): for result in results.contacted.values(): assert result is not None - assert re.match(r'^BGYSC5203E', result.get('zinfo_err_msg')) assert re.match(r'^An invalid subset', result.get('msg')) diff --git a/tests/unit/test_zos_gather_facts.py b/tests/unit/test_zos_gather_facts.py index 84b90c186..a7ab4a803 100644 --- a/tests/unit/test_zos_gather_facts.py +++ b/tests/unit/test_zos_gather_facts.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 +# Copyright (c) IBM Corporation 2022 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -18,7 +18,6 @@ __metaclass__ = type import pytest -from mock import call # Used my some mock modules, should match import directly below IMPORT_NAME = "ibm_zos_core.plugins.modules.zos_gather_facts" @@ -26,30 +25,32 @@ # Tests for zos_father_facts helper functions test_data = [ - (["ipl"], "zinfo -j -t ipl"), - (["ipl "], "zinfo -j -t ipl"), - ([" ipl"], "zinfo -j -t ipl"), - (["ipl", "sys"], "zinfo -j -t ipl -t sys"), - (["all"], "zinfo -j -a"), - (None, "zinfo -j -a"), - (["ipl", "all", "sys"], "zinfo -j -a"), + (["ipl"], ["ipl"]), + (["ipl "], ["ipl"]), + ([" ipl"], ["ipl"]), + (["ipl", "sys"], ["ipl", "sys"]), + (["all"], ["all"]), + (None, ["all"]), + (["ipl", "all", "sys"], ["all"]), # function does not validate legal vs illegal subsets - (["asdf"], "zinfo -j -t asdf"), - ([""], None), # attemtped injection + (["asdf"], ["asdf"]), + ([""], None), (["ipl; cat /.bashrc"], None), # attemtped injection + # for now, 'all' with some other invalid subset resolves to 'all' + (["ipl", "all", "ipl; cat /.ssh/id_rsa"], ["all"]), ] @pytest.mark.parametrize("args,expected", test_data) -def test_zos_gather_facts_zinfo_cmd_string_builder( +def test_zos_gather_facts_zinfo_facts_list_builder( zos_import_mocker, args, expected): mocker, importer = zos_import_mocker zos_gather_facts = importer(IMPORT_NAME) try: - result = zos_gather_facts.zinfo_cmd_string_builder(args) -# # add more logic here as the function evolves. + result = zos_gather_facts.zinfo_facts_list_builder(args) + # add more logic here as the function evolves. 
except Exception: result = None assert result == expected From 3b3176b3fbae2f366b84883d01fc7f11a32f963f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 2 Feb 2024 12:51:17 -0600 Subject: [PATCH 294/413] [v1.10.0] [zos_backup_restore]Added choices for space type (#1200) * Added choices for space type * Added changelog --- .../fragments/1200-zos_backup_restore-sanity-issues.yml | 4 ++++ plugins/modules/zos_backup_restore.py | 2 +- tests/sanity/ignore-2.14.txt | 1 - tests/sanity/ignore-2.15.txt | 1 - tests/sanity/ignore-2.16.txt | 1 - 5 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml diff --git a/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml b/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml new file mode 100644 index 000000000..27d40f560 --- /dev/null +++ b/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml @@ -0,0 +1,4 @@ +trivial: + - zos_backup_restore - Added space type choices to argument spec to remove + validate-modules:doc-choices-do-not-match-spec. + (https://github.com/ansible-collections/ibm_zos_core/pull/1200). 
diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 9d8560306..080c7efab 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -337,7 +337,7 @@ def main(): ), ), space=dict(type="int", required=False, aliases=["size"]), - space_type=dict(type="str", required=False, aliases=["unit"]), + space_type=dict(type="str", required=False, aliases=["unit"], choices=["K", "M", "G", "CYL", "TRK"]), volume=dict(type="str", required=False), full_volume=dict(type="bool", default=False), temp_volume=dict(type="str", required=False, aliases=["dest_volume"]), diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 8099f00e0..89cf4db51 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -1,5 +1,4 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 8099f00e0..89cf4db51 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -1,5 +1,4 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. 
plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 8099f00e0..89cf4db51 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -1,5 +1,4 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 9824b0925f2777ad641f54a17f407e924a2f1936 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 6 Feb 2024 14:09:52 -0600 Subject: [PATCH 295/413] Cherry picked removed hard coded content from staging-v1.9.0-beta.1 (#1194) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Cherry picked removed hard coded content from staging-v1.9.0-beta.1 Added preferred volumes changes [v1.10.0] [zos_copy] Enhance zos_copy performance when copying multiple PDS members (#1183) * [v1.9.0] [zos_copy] Enhancement/764/copy members (#1176) * Changed member copy into a bulk call * Modified copy to members * Cleaned code * Removed hardcoded content from zos_copy tests * Added fix for uss files * Added distinction between uss and mvs * Added alias fix * Moved the copy section to below * Modified for seq test * Added fix for copy dest lock * Added msgs for debugging * 
Added final changes to member copy * Added copy for when seq to pdse * Add changelog * Added a line into docs * Modified doc * Modified doc * Update changelog * Created a new changelog * Corrected typo Fix for empty volumes on test_config Added comment to remind uncomment test case in the future Add more validation Add more validation Add more validation Add to config volumes and remove upper case variable name * Added keyword into tests --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- tests/conftest.py | 12 +- tests/functional/modules/test_zos_apf_func.py | 432 ++++++++++++------ .../modules/test_zos_archive_func.py | 270 ++++++----- .../modules/test_zos_backup_restore.py | 191 ++++---- .../modules/test_zos_blockinfile_func.py | 118 ++--- .../functional/modules/test_zos_copy_func.py | 235 +++++----- .../modules/test_zos_data_set_func.py | 260 ++++++----- .../modules/test_zos_encode_func.py | 200 +++++--- .../functional/modules/test_zos_fetch_func.py | 92 ++-- .../functional/modules/test_zos_find_func.py | 17 +- .../modules/test_zos_job_query_func.py | 6 +- .../modules/test_zos_job_submit_func.py | 56 ++- .../modules/test_zos_lineinfile_func.py | 111 ++--- .../functional/modules/test_zos_mount_func.py | 57 ++- .../modules/test_zos_mvs_raw_func.py | 273 ++++++----- .../modules/test_zos_tso_command_func.py | 15 +- .../modules/test_zos_unarchive_func.py | 223 ++++----- tests/helpers/dataset.py | 48 ++ tests/helpers/volumes.py | 121 +++++ 19 files changed, 1649 insertions(+), 1088 deletions(-) create mode 100644 tests/helpers/dataset.py create mode 100644 tests/helpers/volumes.py diff --git a/tests/conftest.py b/tests/conftest.py index 506214f29..c8513ad37 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,9 +12,9 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type - import pytest from ibm_zos_core.tests.helpers.ztest import ZTestHelper +from 
ibm_zos_core.tests.helpers.volumes import get_volumes import sys from mock import MagicMock import importlib @@ -84,6 +84,14 @@ def ansible_zos_module(request, z_python_interpreter): except Exception: pass + # Call of the class by the class ls_Volume (volumes.py file) as many times needed + # one time the array is filled +@pytest.fixture(scope="session") +def volumes_on_systems(ansible_zos_module, request): + """ Call the pytest-ansible plugin to check volumes on the system and work properly a list by session.""" + path = request.config.getoption("--zinventory") + list_Volumes = get_volumes(ansible_zos_module, path) + yield list_Volumes # * We no longer edit sys.modules directly to add zoautil_py mock # * because automatic teardown is not performed, leading to mock pollution @@ -108,4 +116,4 @@ def perform_imports(imports): newimp = [importlib.import_module(x) for x in imports] return newimp - yield (mocker, perform_imports) + yield (mocker, perform_imports) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index f53ee7592..3c3d96ab2 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -12,6 +12,8 @@ # limitations under the License. 
from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.volumes import Volume_Handler from shellescape import quote from pprint import pprint import os @@ -20,37 +22,6 @@ __metaclass__ = type - -TEST_INFO = dict( - test_add_del=dict( - library="", state="present", force_dynamic=True - ), - test_add_del_with_tmp_hlq_option=dict( - library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict( - data_set_name="", backup=True - ) - ), - test_add_del_volume=dict( - library="", volume=" ", state="present", force_dynamic=True - ), - test_add_del_persist=dict( - library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ), - test_add_del_volume_persist=dict( - library="", volume=" ", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ), - test_batch_add_del=dict( - batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], - persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ), - test_operation_list=dict( - operation="list" - ), - test_operation_list_with_filter=dict( - operation="list", library="" - ) -) - add_expected = """/*BEGINAPFLIST*/ /*BEGINBLOCK*/ APFADDDSNAME({0})VOLUME({1}) @@ -74,63 +45,40 @@ del_expected = """/*BEGINAPFLIST*/ /*ENDAPFLIST*/""" - -def run_shell_cmd(hosts, cmdStr): - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - out = result.get("stdout") - return out - - -def persistds_create(hosts): - cmdStr = "mvstmp APFTEST.PRST" - prstds = run_shell_cmd(hosts, cmdStr)[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - run_shell_cmd(hosts, cmdStr) - return prstds - - -def persistds_delele(hosts, ds): - cmdStr = "drm {0}".format(ds) - run_shell_cmd(hosts, cmdStr) - - -def 
set_test_env(hosts, test_info): - # results = hosts.all.zos_data_set(name=ds, type="SEQ") - cmdStr = "mvstmp APFTEST" - ds = run_shell_cmd(hosts, cmdStr)[:25] - cmdStr = "dtouch -tseq {0}".format(ds) - run_shell_cmd(hosts, cmdStr) - test_info['library'] = ds - if test_info.get('volume'): - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - vol = run_shell_cmd(hosts, cmdStr) - test_info['volume'] = vol - if test_info.get('persistent'): - test_info['persistent']['data_set_name'] = persistds_create(hosts) - - def clean_test_env(hosts, test_info): - # hosts.all.zos_data_set(name=test_info['library'], state='absent') cmdStr = "drm {0}".format(test_info['library']) - run_shell_cmd(hosts, cmdStr) + hosts.all.shell(cmd=cmdStr) if test_info.get('persistent'): - # hosts.all.zos_data_set(name=test_info['persistent']['data_set_name'], state='absent') - persistds_delele(hosts, test_info['persistent']['data_set_name']) + cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) + hosts.all.shell(cmd=cmdStr) def test_add_del(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] - set_test_env(hosts, test_info) + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in 
results.contacted.values(): assert result.get("rc") == 0 test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 clean_test_env(hosts, test_info) @@ -139,17 +87,32 @@ def test_add_del(ansible_zos_module): def test_add_del_with_tmp_hlq_option(ansible_zos_module): hosts = ansible_zos_module tmphlq = "TMPHLQ" - test_info = TEST_INFO['test_add_del_with_tmp_hlq_option'] + test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) test_info['tmp_hlq'] = tmphlq - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 assert result.get("backup_name")[:6] == tmphlq test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 clean_test_env(hosts, test_info) @@ -157,15 +120,30 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module): def test_add_del_volume(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_volume'] - set_test_env(hosts, test_info) + test_info = dict(library="", 
volume="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 clean_test_env(hosts, test_info) @@ -200,65 +178,111 @@ def test_add_del_persist(ansible_zos_module): def test_add_del_volume_persist(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_volume_persist'] - set_test_env(hosts, test_info) + test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = 
"dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 add_exptd = add_expected.format(test_info['library'], test_info['volume']) add_exptd = add_exptd.replace(" ", "") cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") assert actual == add_exptd test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 del_exptd = del_expected.replace(" ", "") cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") assert actual == del_exptd clean_test_env(hosts, test_info) - -def test_batch_add_del(ansible_zos_module): - hosts = ansible_zos_module - test_info = TEST_INFO['test_batch_add_del'] - for item in test_info['batch']: - set_test_env(hosts, item) - test_info['persistent']['data_set_name'] = persistds_create(hosts) - results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("rc") == 0 - add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], - test_info['batch'][1]['library'], test_info['batch'][1]['volume'], - test_info['batch'][2]['library'], test_info['batch'][2]['volume']) - add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual 
= run_shell_cmd(hosts, cmdStr).replace(" ", "") - assert actual == add_exptd - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") - assert actual == del_exptd - for item in test_info['batch']: - clean_test_env(hosts, item) - persistds_delele(hosts, test_info['persistent']['data_set_name']) +""" +keyword: ENABLE-FOR-1-3 +Test commented because there is a failure in ZOAU 1.2.x, that should be fixed in 1.3.x, so +whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 +should uncomment this test as part of the validation process. +""" +#def test_batch_add_del(ansible_zos_module): +# hosts = ansible_zos_module +# test_info = dict( +# batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], +# persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True +# ) +# for item in test_info['batch']: +# ds = get_tmp_ds_name(1,1) +# hosts.all.shell(cmd="dtouch {0}".format(ds)) +# item['library'] = ds +# cmdStr = "dls -l " + ds + " | awk '{print $5}' " +# results = hosts.all.shell(cmd=cmdStr) +# for result in results.contacted.values(): +# vol = result.get("stdout") +# item['volume'] = vol +# prstds = get_tmp_ds_name(5,5) +# cmdStr = "dtouch {0}".format(prstds) +# hosts.all.shell(cmd=cmdStr) +# test_info['persistent']['data_set_name'] = prstds +# hosts.all.shell(cmd="echo \"{0}\" > {1}".format("Hello World, Here's Jhonny", prstds)) +# results = hosts.all.zos_apf(**test_info) +# pprint(vars(results)) +# for result in results.contacted.values(): +# assert result.get("rc") == 0 +# add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], 
test_info['batch'][0]['volume'], +# test_info['batch'][1]['library'], test_info['batch'][1]['volume'], +# test_info['batch'][2]['library'], test_info['batch'][2]['volume']) +# add_exptd = add_exptd.replace(" ", "") +# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) +# results = hosts.all.shell(cmd=cmdStr) +# for result in results.contacted.values(): +# actual = result.get("stdout") +# actual = actual.replace(" ", "") +# assert actual == add_exptd +# test_info['state'] = 'absent' +# results = hosts.all.zos_apf(**test_info) +# pprint(vars(results)) +# for result in results.contacted.values(): +# assert result.get("rc") == 0 +# del_exptd = del_expected.replace(" ", "") +# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) +# results = hosts.all.shell(cmd=cmdStr) +# for result in results.contacted.values(): +# actual = result.get("stdout") +# actual = actual.replace(" ", "") +# assert actual == del_exptd +# for item in test_info['batch']: +# clean_test_env(hosts, item) +# cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) +# hosts.all.shell(cmd=cmdStr) def test_operation_list(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_operation_list'] + test_info = dict(operation="list") results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): listJson = result.get("stdout") import json @@ -269,14 +293,30 @@ def test_operation_list(ansible_zos_module): def test_operation_list_with_filter(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] + test_info = dict(library="", state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = 
hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds hosts.all.zos_apf(**test_info) - ti = TEST_INFO['test_operation_list_with_filter'] + ti = dict(operation="list", library="") ti['library'] = "APFTEST.*" results = hosts.all.zos_apf(**ti) - pprint(vars(results)) for result in results.contacted.values(): listFiltered = result.get("stdout") assert test_info['library'] in listFiltered @@ -291,15 +331,30 @@ def test_operation_list_with_filter(ansible_zos_module): def test_add_already_present(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] + test_info = dict(library="", state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) 
for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -310,11 +365,27 @@ def test_add_already_present(ansible_zos_module): def test_del_not_present(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] - set_test_env(hosts, test_info) + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -323,10 +394,9 @@ def test_del_not_present(ansible_zos_module): def test_add_not_found(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] + test_info = dict(library="", state="present", force_dynamic=True) test_info['library'] = 'APFTEST.FOO.BAR' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -334,12 +404,28 @@ def test_add_not_found(ansible_zos_module): def 
test_add_with_wrong_volume(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_volume'] + test_info = dict(library="", volume="", state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['volume'] = 'T12345' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -348,13 +434,29 @@ def test_add_with_wrong_volume(ansible_zos_module): def test_persist_invalid_ds_format(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp 
APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) - run_shell_cmd(hosts, cmdStr) + hosts.all.shell(cmd=cmdStr) results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 8 clean_test_env(hosts, test_info) @@ -362,12 +464,28 @@ def test_persist_invalid_ds_format(ansible_zos_module): def test_persist_invalid_marker(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "# Invalid marker format" results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 4 clean_test_env(hosts, test_info) @@ -375,12 +493,28 @@ def 
test_persist_invalid_marker(ansible_zos_module): def test_persist_invalid_marker_len(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("msg") == 'marker length may not exceed 72 characters' - clean_test_env(hosts, test_info) + clean_test_env(hosts, test_info) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 32bedb4fe..a9bfd658c 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -14,6 +14,7 @@ from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name import time import pytest @@ -22,13 +23,9 @@ SHELL_EXECUTABLE = "/bin/sh" USS_TEMP_DIR = "/tmp/archive" USS_TEST_FILES = { f"{USS_TEMP_DIR}/foo.txt" : "foo sample 
content", - f"{USS_TEMP_DIR}/bar.txt": "bar sample content", + f"{USS_TEMP_DIR}/bar.txt": "bar sample content", f"{USS_TEMP_DIR}/empty.txt":""} USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" -TEST_PS = "USER.PRIVATE.TESTDS" -TEST_PDS = "USER.PRIVATE.TESTPDS" -HLQ = "USER" -MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" USS_DEST_ARCHIVE = "testarchive.dzp" @@ -331,6 +328,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): - test_mvs_archive_multiple_data_sets_with_missing """ +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -338,9 +336,9 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( @@ -352,12 +350,15 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + src_data_set = get_tmp_ds_name() + archive_data_set = get_tmp_ds_name() + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -368,7 +369,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record if data_set.get("dstype") in ["PDS", "PDSE"]: for member in 
data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -380,33 +381,33 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) if format == "terse": format_dict["format_options"] = dict(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -414,9 +415,9 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - 
dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( @@ -428,12 +429,15 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name() + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -444,7 +448,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -456,9 +460,9 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -466,23 +470,24 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data if format == "terse": 
format_dict["format_options"].update(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -490,20 +495,23 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name() + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + 
hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", record_format="FB", @@ -513,7 +521,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -521,34 +529,36 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d test_line = "this is a test line" for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) if format == "terse": format_dict["format_options"] = dict(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, remove=True, ) # assert response is positive for result in archive_result.contacted.values(): + print(result) assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") - assert data_set.get("name") != c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") + assert src_data_set != c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - 
hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -556,17 +566,19 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -591,25 +603,25 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), - dest=MVS_DEST_ARCHIVE, + src="{0}*".format(src_data_set), + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set for ds in target_ds_list: assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm 
{0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -617,17 +629,19 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -651,10 +665,10 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) - exclude = "{0}1".format(data_set.get("name")) + exclude = "{0}1".format(src_data_set) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), - dest=MVS_DEST_ARCHIVE, + src="{0}*".format(src_data_set), + dest=archive_data_set, format=format_dict, exclude=exclude, ) @@ -662,7 +676,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set for ds in target_ds_list: if 
ds.get("name") == exclude: assert exclude not in result.get("archived") @@ -670,12 +684,12 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -683,17 +697,19 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -718,8 +734,8 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), - dest=MVS_DEST_ARCHIVE, + src="{0}*".format(src_data_set), + dest=archive_data_set, format=format_dict, 
remove=True, ) @@ -727,18 +743,18 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") for ds in target_ds_list: assert ds.get("name") in result.get("archived") assert ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -746,17 +762,19 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -777,7 +795,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, 
ds.get("name"))) # Remove ds to make sure is missing - missing_ds = data_set.get("name")+"1" + missing_ds = src_data_set+"1" hosts.all.zos_data_set(name=missing_ds, state="absent") path_list = [ds.get("name") for ds in target_ds_list] @@ -787,14 +805,14 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, - dest=MVS_DEST_ARCHIVE, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set assert result.get("dest_state") == STATE_INCOMPLETE assert missing_ds in result.get("missing") for ds in target_ds_list: @@ -804,12 +822,13 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -817,20 +836,23 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) -def 
test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set,): +def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", replace=True, @@ -839,7 +861,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -847,9 +869,9 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ test_line = "this is a test line" for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -870,19 +892,19 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ time.sleep(5) archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in 
result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: # extract pid @@ -893,5 +915,5 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) # clean up c code/object/executable files, jcl hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 6231e0902..1b44ec124 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -15,19 +15,12 @@ __metaclass__ = type +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name import pytest from re import search, IGNORECASE, MULTILINE import string import random -VOLUME = "222222" -VOLUME2 = "222222" -VOLUME_TO_BACKUP = VOLUME -BIG_VOLUME = "DSHRL1" -BIG_VOLUME2 = "DSHRL2" -DATA_SET_NAME = "USER.PRIVATE.TESTDS" -DATA_SET_NAME2 = "USER.PRIVATE.TESTDS2" -DATA_SET_PATTERN = "USER.PRIVATE.*" DATA_SET_CONTENTS = "HELLO world" DATA_SET_QUALIFIER = "{0}.PRIVATE.TESTDS" DATA_SET_QUALIFIER2 = "{0}.PRIVATE.TESTDS2" @@ -137,6 +130,7 @@ def assert_data_set_or_file_does_not_exist(hosts, name): def assert_data_set_exists(hosts, data_set_name): results = hosts.all.shell("dls '{0}'".format(data_set_name.upper())) for result in results.contacted.values(): + print(result) found = search( "^{0}$".format(data_set_name), result.get("stdout"), IGNORECASE 
| MULTILINE ) @@ -183,7 +177,7 @@ def assert_file_does_not_exist(hosts, path): # Start of tests # # ---------------------------------------------------------------------------- # - +@pytest.mark.ds @pytest.mark.parametrize( "backup_name,overwrite,recover", [ @@ -199,16 +193,17 @@ def assert_file_does_not_exist(hosts, path): ) def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: if not overwrite: delete_data_set_or_file(hosts, backup_name) - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=overwrite, recover=recover, @@ -216,7 +211,7 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) assert_module_did_not_fail(results) assert_data_set_or_file_exists(hosts, backup_name) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -233,15 +228,16 @@ def test_backup_of_data_set_when_backup_dest_exists( ansible_zos_module, backup_name, overwrite ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: create_data_set_or_file_with_contents(hosts, backup_name, DATA_SET_CONTENTS) assert_data_set_or_file_exists(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=overwrite, ) @@ -251,7 +247,7 @@ def 
test_backup_of_data_set_when_backup_dest_exists( assert_module_failed(results) assert_data_set_or_file_exists(hosts, backup_name) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -272,16 +268,16 @@ def test_backup_and_restore_of_data_set( ansible_zos_module, backup_name, overwrite, recover ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=overwrite, recover=recover, @@ -295,9 +291,8 @@ def test_backup_and_restore_of_data_set( overwrite=overwrite, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) @@ -321,16 +316,16 @@ def test_backup_and_restore_of_data_set_various_space_measurements( ansible_zos_module, backup_name, space, space_type ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) args = dict( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + 
data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=True, space=space, @@ -351,9 +346,8 @@ def test_backup_and_restore_of_data_set_various_space_measurements( args["space_type"] = space_type results = hosts.all.zos_backup_restore(**args) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) @@ -371,16 +365,16 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( ansible_zos_module, backup_name, overwrite ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, ) assert_module_did_not_fail(results) @@ -391,7 +385,6 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( hlq=NEW_HLQ, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, @@ -403,35 +396,67 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( else: assert_module_failed(results) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) -@pytest.mark.parametrize( - "data_set_include", - [ - [DATA_SET_NAME, 
DATA_SET_NAME2], - DATA_SET_PATTERN, - ], -) -def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module, data_set_include): +def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_name2 = get_tmp_ds_name() + data_set_include = [data_set_name, data_set_name2] try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) + delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS + ) + create_sequential_data_set_with_contents( + hosts, data_set_name2, DATA_SET_CONTENTS + ) + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_include), + backup_name=DATA_SET_BACKUP_LOCATION, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=DATA_SET_BACKUP_LOCATION, + overwrite=True, + recover=True, + hlq=NEW_HLQ, + ) + assert_module_did_not_fail(results) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + + +def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_name2 = get_tmp_ds_name() + data_sets_hlq = "ANSIBLE.**" + try: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) + delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + 
hosts, data_set_name, DATA_SET_CONTENTS ) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME2, DATA_SET_CONTENTS + hosts, data_set_name2, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=data_set_include), + data_sets=dict(include=data_sets_hlq), backup_name=DATA_SET_BACKUP_LOCATION, ) assert_module_did_not_fail(results) @@ -444,11 +469,10 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module, data_set_i hlq=NEW_HLQ, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION2) + assert_data_set_exists(hosts, DATA_SET_BACKUP_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -456,21 +480,23 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module, data_set_i def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_name2 = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME2, DATA_SET_CONTENTS + hosts, data_set_name2, 
DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_PATTERN, exclude=DATA_SET_NAME2), + data_sets=dict(include="ANSIBLE.**", exclude=data_set_name2), backup_name=DATA_SET_BACKUP_LOCATION, ) assert_module_did_not_fail(results) @@ -483,11 +509,11 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): hlq=NEW_HLQ, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) + assert_data_set_exists(hosts, DATA_SET_BACKUP_LOCATION) assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -534,32 +560,34 @@ def test_backup_of_data_set_when_data_set_does_not_exist( ansible_zos_module, backup_name ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, ) assert_module_failed(results) assert_data_set_or_file_does_not_exist(hosts, backup_name) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, 
DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), # volume=get_unused_volume_serial(hosts), volume="@@@@", backup_name=DATA_SET_BACKUP_LOCATION, @@ -567,22 +595,23 @@ def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): assert_module_failed(results) assert_data_set_does_not_exist(hosts, DATA_SET_BACKUP_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=DATA_SET_BACKUP_LOCATION, ) assert_module_did_not_fail(results) @@ -597,7 +626,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): assert_module_failed(results) assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -606,15 +635,15 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # hosts = ansible_zos_module # try: # 
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_NAME) -# delete_data_set_or_file(hosts, DATA_SET_NAME2) +# delete_data_set_or_file(hosts, data_set_name) +# delete_data_set_or_file(hosts, data_set_name2) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) # create_sequential_data_set_with_contents( -# hosts, DATA_SET_NAME, DATA_SET_CONTENTS, VOLUME +# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME # ) # create_sequential_data_set_with_contents( -# hosts, DATA_SET_NAME2, DATA_SET_CONTENTS, VOLUME2 +# hosts, data_set_name2, DATA_SET_CONTENTS, VOLUME2 # ) # results = hosts.all.zos_backup_restore( # operation="backup", @@ -636,8 +665,8 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) # assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) # finally: -# delete_data_set_or_file(hosts, DATA_SET_NAME) -# delete_data_set_or_file(hosts, DATA_SET_NAME2) +# delete_data_set_or_file(hosts, data_set_name) +# delete_data_set_or_file(hosts, data_set_name2) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -647,9 +676,9 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # hosts = ansible_zos_module # try: # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_NAME) +# delete_data_set_or_file(hosts, data_set_name) # create_sequential_data_set_with_contents( -# hosts, DATA_SET_NAME, DATA_SET_CONTENTS, VOLUME +# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME # ) # results = hosts.all.zos_backup_restore( # operation="backup", @@ -663,7 +692,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # ) # assert_module_did_not_fail(results) 
# assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_NAME) +# delete_data_set_or_file(hosts, data_set_name) # results = hosts.all.zos_backup_restore( # operation="restore", # backup_name=DATA_SET_BACKUP_LOCATION, @@ -675,7 +704,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # space_type="M", # ) # assert_module_did_not_fail(results) -# assert_data_set_exists_on_volume(hosts, DATA_SET_NAME, VOLUME) +# assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME) # finally: -# delete_data_set_or_file(hosts, DATA_SET_NAME) +# delete_data_set_or_file(hosts, data_set_name) # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index b2e567dc1..39d04639f 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -12,15 +12,16 @@ # limitations under the License. from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name from shellescape import quote import time import re import pytest import inspect +import os __metaclass__ = type -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" TEST_FOLDER_BLOCKINFILE = "/tmp/ansible-core-tests/zos_blockinfile/" c_pgm="""#include <stdio.h> @@ -939,9 +940,8 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") - test_name = "DST1" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -962,9 +962,8 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") - test_name = "DST2" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -985,9 +984,8 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") - test_name = "DST3" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1008,9 +1006,8 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") - test_name = "DST4" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1031,9 +1028,8 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") - test_name = "DST5" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1054,9 +1050,8 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") - test_name = "DST6" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1077,9 +1072,8 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") - test_name = "DST7" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1100,9 +1094,8 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") - test_name = "DST8" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1123,9 +1116,8 @@ def test_ds_block_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(block="", state="absent") - test_name = "DST9" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1147,17 +1139,10 @@ def test_ds_tmp_hlq_option(ansible_zos_module): ds_type = "SEQ" params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") kwargs = dict(backup_name=r"TMPHLQ\..") - test_name = "DST10" - temp_file = "/tmp/zos_lineinfile/" + test_name content = TEST_CONTENT try: - hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - if len(hlq) > 8: - hlq = hlq[:8] - ds_full_name = hlq + "." + test_name.upper() + "." 
+ ds_type + ds_full_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_full_name hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) @@ -1182,9 +1167,8 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) - test_name = "DST11" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1209,9 +1193,8 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) if backup_name: params["backup_name"] = backup_name - test_name = "DST12" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1238,21 +1221,22 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="",insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=True) MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT if ds_type == "SEQ": - params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name+"({0})".format(MEMBER_2) try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -1271,7 +1255,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): hosts.all.file(path="/tmp/disp_shr/", state="directory") hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + call_c_jcl.format(default_data_set_name, MEMBER_1), 
'/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") @@ -1289,7 +1273,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") ######################### # Encoding tests @@ -1301,7 +1285,7 @@ def test_uss_encoding(ansible_zos_module, encoding): insert_data = "Insert this string" params = dict(insertafter="SIMPLE", block=insert_data, state="present") params["encoding"] = encoding - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = TEST_FOLDER_BLOCKINFILE + encoding content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) @@ -1318,6 +1302,7 @@ def test_uss_encoding(ansible_zos_module, encoding): finally: remove_uss_environment(ansible_zos_module) + @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) @pytest.mark.parametrize("encoding", ["IBM-1047"]) @@ -1327,9 +1312,8 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): insert_data = "Insert this string" params = dict(insertafter="SIMPLE", block=insert_data, state="present") params["encoding"] = encoding - test_name = "DST13" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) @@ -1354,6 +1338,8 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): assert result.get("stdout") == EXPECTED_ENCODING finally: remove_ds_environment(ansible_zos_module, ds_name) + + ######################### # Negative tests ######################### @@ -1375,9 +1361,8 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): ds_type = 'SEQ' params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' - test_name = "DST13" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1409,14 +1394,10 @@ def test_ds_not_supported(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") - test_name = "DST14" - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name try: - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - ds_name = test_name.upper() + "." + ds_type + ds_name = ds_name.upper() + "." 
+ ds_type results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') for result in results.contacted.values(): assert result.get("changed") is True @@ -1434,18 +1415,19 @@ def test_ds_not_supported(ansible_zos_module, dstype): def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="", insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=False) MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name+"({0})".format(MEMBER_2) content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -1460,7 +1442,7 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts.all.file(path="/tmp/disp_shr/", state="directory") hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + call_c_jcl.format(default_data_set_name, MEMBER_1), '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") @@ -1475,4 +1457,4 @@ def 
test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file + hosts.all.zos_data_set(name=default_data_set_name, state="absent") diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 2cc11c9dd..1cb3cb7cb 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -22,7 +22,8 @@ from tempfile import mkstemp import subprocess - +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -229,6 +230,7 @@ //STDERR DD SYSOUT=* //""" + def populate_dir(dir_path): for i in range(5): with open(dir_path + "/" + "file" + str(i + 1), "w") as infile: @@ -1557,7 +1559,7 @@ def test_copy_template_file_with_non_default_markers(ansible_zos_module): @pytest.mark.template def test_copy_template_file_to_dataset(ansible_zos_module): hosts = ansible_zos_module - dest_dataset = "USER.TEST.TEMPLATE" + dest_dataset = get_tmp_ds_name() temp_dir = tempfile.mkdtemp() try: @@ -1610,7 +1612,7 @@ def test_copy_asa_file_to_asa_sequential(ansible_zos_module): hosts = ansible_zos_module try: - dest = "USER.ASA.SEQ" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") copy_result = hosts.all.zos_copy( @@ -1644,7 +1646,7 @@ def test_copy_asa_file_to_asa_partitioned(ansible_zos_module): hosts = ansible_zos_module try: - dest = "USER.ASA.PDSE" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") full_dest = "{0}(TEST)".format(dest) @@ -1678,7 +1680,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.SEQ" + src = 
get_tmp_ds_name() hosts.all.zos_data_set( name=src, state="present", @@ -1686,7 +1688,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.SEQ" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_copy( @@ -1727,7 +1729,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.SEQ" + src = get_tmp_ds_name() hosts.all.zos_data_set( name=src, state="present", @@ -1735,7 +1737,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.PDSE" + dest = get_tmp_ds_name() full_dest = "{0}(MEMBER)".format(dest) hosts.all.zos_data_set(name=dest, state="absent") @@ -1777,7 +1779,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.PDSE" + src = get_tmp_ds_name() full_src = "{0}(MEMBER)".format(src) hosts.all.zos_data_set( name=src, @@ -1786,7 +1788,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.SEQ" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_copy( @@ -1827,7 +1829,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.PDSE" + src = get_tmp_ds_name() full_src = "{0}(MEMBER)".format(src) hosts.all.zos_data_set( name=src, @@ -1836,7 +1838,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.PDSE" + dest = get_tmp_ds_name() full_dest = "{0}(MEMBER)".format(dest) hosts.all.zos_data_set(name=dest, state="absent") @@ -1878,7 +1880,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.ASA.SRC" + src = get_tmp_ds_name() hosts.all.zos_data_set( name=src, state="present", @@ -1960,19 +1962,20 @@ def 
test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq @pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"]) def test_copy_dest_lock(ansible_zos_module, ds_type): - DATASET_1 = "USER.PRIVATE.TESTDS" - DATASET_2 = "ADMI.PRIVATE.TESTDS" - MEMBER_1 = "MEM1" + hosts = ansible_zos_module + data_set_1 = get_tmp_ds_name() + data_set_2 = get_tmp_ds_name() + member_1 = "MEM1" if ds_type == "PDS" or ds_type == "PDSE": - src_data_set = DATASET_1 + "({0})".format(MEMBER_1) - dest_data_set = DATASET_2 + "({0})".format(MEMBER_1) + src_data_set = data_set_1 + "({0})".format(member_1) + dest_data_set = data_set_2 + "({0})".format(member_1) else: - src_data_set = DATASET_1 - dest_data_set = DATASET_2 + src_data_set = data_set_1 + dest_data_set = data_set_2 try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DATASET_1, state="present", type=ds_type, replace=True) - hosts.all.zos_data_set(name=DATASET_2, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) @@ -2025,15 +2028,15 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): # clean up c code/object/executable files, jcl hosts.all.shell(cmd='rm -r /tmp/disp_shr') # remove pdse - hosts.all.zos_data_set(name=DATASET_1, state="absent") - hosts.all.zos_data_set(name=DATASET_2, state="absent") + hosts.all.zos_data_set(name=data_set_1, state="absent") + hosts.all.zos_data_set(name=data_set_2, state="absent") @pytest.mark.uss @pytest.mark.seq def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = 
get_tmp_ds_name() fd, src = tempfile.mkstemp() os.close(fd) @@ -2086,7 +2089,7 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): @pytest.mark.seq def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() fd, src = tempfile.mkstemp() os.close(fd) @@ -2141,7 +2144,7 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): @pytest.mark.seq def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() fd, src = tempfile.mkstemp() os.close(fd) @@ -2172,7 +2175,7 @@ def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module): hosts = ansible_zos_module src = "/tmp/zos_copy_binary_file" - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -2221,7 +2224,7 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) ]) def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -2260,7 +2263,7 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): ]) def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present") @@ -2288,7 +2291,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): ]) def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = 
get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="absent") @@ -2400,7 +2403,7 @@ def test_copy_ps_to_existing_uss_dir(ansible_zos_module): def test_copy_ps_to_non_existing_ps(ansible_zos_module): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -2426,7 +2429,7 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): def test_copy_ps_to_empty_ps(ansible_zos_module, force): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present") @@ -2452,7 +2455,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="absent") @@ -2483,7 +2486,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="absent") @@ -2514,7 +2517,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): def test_backup_sequential_data_set(ansible_zos_module, backup): hosts = ansible_zos_module src = "/etc/profile" - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present") @@ -2556,7 +2559,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): ]) def test_copy_file_to_non_existing_member(ansible_zos_module, src): hosts = ansible_zos_module - data_set = "USER.TEST.PDS.FUNCTEST" + data_set = get_tmp_ds_name() dest = "{0}(PROFILE)".format(data_set) try: @@ -2602,7 +2605,7 
@@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): ]) def test_copy_file_to_existing_member(ansible_zos_module, src): hosts = ansible_zos_module - data_set = "USER.TEST.PDS.FUNCTEST" + data_set = get_tmp_ds_name() dest = "{0}(PROFILE)".format(data_set) try: @@ -2653,9 +2656,9 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module - src_data_set = "USER.TEST.PDS.SOURCE" + src_data_set = get_tmp_ds_name() src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) - dest_data_set = "USER.TEST.PDS.FUNCTEST" + dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: @@ -2700,9 +2703,9 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module - src_data_set = "USER.TEST.PDS.SOURCE" + src_data_set = get_tmp_ds_name() src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) - dest_data_set = "USER.TEST.PDS.FUNCTEST" + dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: @@ -2746,7 +2749,7 @@ def test_copy_data_set_to_existing_member(ansible_zos_module, args): @pytest.mark.parametrize("is_remote", [False, True]) def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): hosts = ansible_zos_module - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() dest_path = "{0}(PROFILE)".format(dest) src_file = "/etc/profile" @@ -2775,7 +2778,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): def test_copy_dir_to_non_existing_pdse(ansible_zos_module): hosts = ansible_zos_module src_dir = "/tmp/testdir" - dest = "USER.TEST.PDSE.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.file(path=src_dir, state="directory") @@ -2804,7 +2807,7 @@ def 
test_copy_dir_to_non_existing_pdse(ansible_zos_module): @pytest.mark.pdse def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.PDSE.FUNCTEST" + dest = get_tmp_ds_name() temp_path = tempfile.mkdtemp() src_basename = "source/" @@ -2839,7 +2842,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.file(path=src_dir, state="directory") @@ -2877,9 +2880,9 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module - src_data_set = "USER.TEST.PDS.SOURCE" + src_data_set = get_tmp_ds_name() src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) - dest_data_set = "USER.TEST.PDS.FUNCTEST" + dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: @@ -2922,8 +2925,8 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module - src = "USER.TEST.PDS.SRC" - dest = "USER.TEST.PDS.DEST" + src = get_tmp_ds_name() + dest = get_tmp_ds_name() try: populate_partitioned_data_set(hosts, src, args["src_type"]) @@ -2957,11 +2960,12 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_created): hosts = ansible_zos_module # This dataset and member should be available on any z/OS system. 
- cobol_src_pds = "USER.COBOL.SRC" + mlq_size = 3 + cobol_src_pds = get_tmp_ds_name(mlq_size) cobol_src_mem = "HELLOCBL" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + src_lib = get_tmp_ds_name(mlq_size) + dest_lib = get_tmp_ds_name(mlq_size) + dest_lib_aliases = get_tmp_ds_name(mlq_size) pgm_mem = "HELLO" pgm_mem_alias = "ALIAS1" try: @@ -3091,14 +3095,14 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr @pytest.mark.uss def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_s=3 + cobol_src_pds = get_tmp_ds_name(mlq_s) cobol_src_mem = "HELLOCBL" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" + src_lib = get_tmp_ds_name(mlq_s) + dest_lib = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + dest_lib_aliases = get_tmp_ds_name(mlq_s) pgm_mem_alias = "ALIAS1" uss_dest = "/tmp/HELLO" @@ -3241,19 +3245,17 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_size = 3 + cobol_src_pds = get_tmp_ds_name(mlq_size) cobol_src_mem = "HELLOCBL" cobol_src_mem2 = "HICBL2" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + src_lib = get_tmp_ds_name(mlq_size) + dest_lib = get_tmp_ds_name(mlq_size) + dest_lib_aliases = get_tmp_ds_name(mlq_size) pgm_mem = "HELLO" pgm2_mem = "HELLO2" pgm_mem_alias = "ALIAS1" pgm2_mem_alias = "ALIAS2" - - try: # allocate pds for cobol src code hosts.all.zos_data_set( @@ -3435,12 +3437,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): @pytest.mark.parametrize("is_created", [False, True]) def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts = 
ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_s = 3 + cobol_src_pds = get_tmp_ds_name(mlq_s) cobol_src_mem = "HELLOCBL" cobol_src_mem2 = "HICBL2" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" + src_lib = get_tmp_ds_name(mlq_s) + dest_lib = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" pgm2_mem = "HELLO2" uss_location = "/tmp/loadlib" @@ -3593,13 +3595,13 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_s=3 + cobol_src_pds = get_tmp_ds_name(mlq_s) cobol_src_mem = "HELLOCBL" cobol_src_mem2 = "HICBL2" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + src_lib = get_tmp_ds_name(mlq_s) + dest_lib = get_tmp_ds_name(mlq_s) + dest_lib_aliases = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" pgm2_mem = "HELLO2" pgm_mem_alias = "ALIAS1" @@ -3781,7 +3783,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") hosts.all.file(path=uss_dir_path, state="absent") - +#Special case to call a program @pytest.mark.uss def test_copy_executables_uss_to_uss(ansible_zos_module): hosts= ansible_zos_module @@ -3815,8 +3817,9 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts= ansible_zos_module src= "/tmp/c/hello_world.c" + mlq_size = 3 src_jcl_call= "/tmp/c/call_hw_pgm.jcl" - dest = "USER.LOAD.DEST" + dest = get_tmp_ds_name(mlq_size) member = "HELLOSRC" try: generate_executable_uss(hosts, src, src_jcl_call) @@ -3856,7 +3859,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): @pytest.mark.pdse -def test_copy_pds_member_with_system_symbol(ansible_zos_module,): +def test_copy_pds_member_with_system_symbol(ansible_zos_module): """This test is for bug #543 in 
GitHub. In some versions of ZOAU, datasets.listing can't handle system symbols in volume names and therefore fails to get details from a dataset. @@ -3869,7 +3872,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module,): # The volume for this dataset should use a system symbol. # This dataset and member should be available on any z/OS system. src = "SYS1.SAMPLIB(IZUPRM00)" - dest = "USER.TEST.PDS.DEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set( @@ -3903,10 +3906,10 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module,): @pytest.mark.pdse def test_copy_multiple_data_set_members(ansible_zos_module): hosts = ansible_zos_module - src = "USER.FUNCTEST.SRC.PDS" + src = get_tmp_ds_name() src_wildcard = "{0}(ABC*)".format(src) - dest = "USER.FUNCTEST.DEST.PDS" + dest = get_tmp_ds_name() member_list = ["MEMBER1", "ABCXYZ", "ABCASD"] ds_list = ["{0}({1})".format(src, member) for member in member_list] @@ -3949,9 +3952,9 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): issue was discovered in https://github.com/ansible-collections/ibm_zos_core/issues/560. 
""" hosts = ansible_zos_module - src = "USER.FUNCTEST.SRC.PDS" + src = get_tmp_ds_name() - dest = "USER.FUNCTEST.DEST.PDS" + dest = get_tmp_ds_name() member_list = ["MEMBER1", "ABCXYZ", "ABCASD"] src_ds_list = ["{0}({1})".format(src, member) for member in member_list] dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] @@ -3994,7 +3997,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.parametrize("ds_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module - data_set = "USER.TEST.PDSE.SOURCE" + data_set = get_tmp_ds_name() src = "{0}(MEMBER)".format(data_set) dest = "/tmp/member" @@ -4036,7 +4039,7 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module - data_set = "USER.TEST.PDSE.SOURCE" + data_set = get_tmp_ds_name() src = "{0}(MEMBER)".format(data_set) dest = "/tmp/member" @@ -4079,7 +4082,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module - src_ds = "USER.TEST.FUNCTEST" + src_ds = get_tmp_ds_name() dest = "/tmp/" dest_path = "/tmp/{0}".format(src_ds) @@ -4124,7 +4127,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module - src_ds = "USER.TEST.FUNCTEST" + src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) dest = "/tmp/" dest_path = "/tmp/MEMBER" @@ -4170,9 +4173,9 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module - 
src_ds = "USER.TEST.PDS.SOURCE" + src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -4210,9 +4213,9 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module - src_ds = "USER.TEST.PDS.SOURCE" + src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) @@ -4252,7 +4255,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set( @@ -4299,7 +4302,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module src = tempfile.mkdtemp() - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() members = ["FILE1", "FILE2", "FILE3", "FILE4", "FILE5"] backup_name = None @@ -4341,11 +4344,16 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) -def test_copy_data_set_to_volume(ansible_zos_module, src_type): +def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module - source = "USER.TEST.FUNCTEST.SRC" - dest = "USER.TEST.FUNCTEST.DEST" - source_member = "USER.TEST.FUNCTEST.SRC(MEMBER)" + source = get_tmp_ds_name() + dest = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + if volume_1 == "SCR03": + volume = volumes.get_available_vol() + 
volumes.free_vol(volume_1) + volume_1 = volume try: hosts.all.zos_data_set(name=source, type=src_type, state='present') hosts.all.zos_data_set(name=source_member, type="member", state='present') @@ -4353,7 +4361,7 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): src=source, dest=dest, remote_src=True, - volume='000000' + volume=volume_1 ) for cp in copy_res.contacted.values(): @@ -4368,7 +4376,7 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): for cv in check_vol.contacted.values(): assert cv.get('rc') == 0 - assert "000000" in cv.get('stdout') + assert volume_1 in cv.get('stdout') finally: hosts.all.zos_data_set(name=source, state='absent') hosts.all.zos_data_set(name=dest, state='absent') @@ -4378,7 +4386,7 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): hosts = ansible_zos_module src_ds = TEST_VSAM_KSDS - dest_ds = "USER.TEST.VSAM.KSDS" + dest_ds = get_tmp_ds_name() try: copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True) @@ -4403,8 +4411,8 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): @pytest.mark.parametrize("force", [False, True]) def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): hosts = ansible_zos_module - src_ds = "USER.TEST.VSAM.SOURCE" - dest_ds = "USER.TEST.VSAM.KSDS" + src_ds = get_tmp_ds_name() + dest_ds = get_tmp_ds_name() try: create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) @@ -4438,8 +4446,8 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): @pytest.mark.parametrize("backup", [None, "USER.TEST.VSAM.KSDS.BACK"]) def test_backup_ksds(ansible_zos_module, backup): hosts = ansible_zos_module - src = "USER.TEST.VSAM.SOURCE" - dest = "USER.TEST.VSAM.KSDS" + src = get_tmp_ds_name() + dest = get_tmp_ds_name() backup_name = None try: @@ -4486,17 +4494,19 @@ def test_backup_ksds(ansible_zos_module, backup): @pytest.mark.vsam -def 
test_copy_ksds_to_volume(ansible_zos_module): +def test_copy_ksds_to_volume(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module src_ds = TEST_VSAM_KSDS - dest_ds = "USER.TEST.VSAM.KSDS" + dest_ds = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: copy_res = hosts.all.zos_copy( src=src_ds, dest=dest_ds, remote_src=True, - volume="000000" + volume=volume_1 ) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") @@ -4511,16 +4521,17 @@ def test_copy_ksds_to_volume(ansible_zos_module): output = "\n".join(dd_names[0]["content"]) assert "IN-CAT" in output assert re.search(r"\bINDEXED\b", output) - assert re.search(r"\b000000\b", output) + assert re.search(r"\b{0}\b".format(volume_1), output) finally: hosts.all.zos_data_set(name=dest_ds, state="absent") -def test_dest_data_set_parameters(ansible_zos_module): +def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module src = "/etc/profile" - dest = "USER.TEST.DEST" - volume = "000000" + dest = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() space_primary = 3 space_secondary = 2 space_type = "K" @@ -4612,7 +4623,7 @@ def test_ensure_tmp_cleanup(ansible_zos_module): @pytest.mark.parametrize("force", [False, True]) def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option(ansible_zos_module, force): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() src_file = "/etc/profile" tmphlq = "TMPHLQ" try: diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 0a3972646..f5568f55e 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -21,6 +21,8 @@ from pipes import quote from pprint import pprint +from ibm_zos_core.tests.helpers.volumes import Volume_Handler 
+from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # TODO: determine if data set names need to be more generic for testcases # TODO: add additional tests to check additional data set creation parameter combinations @@ -34,10 +36,6 @@ ("lds"), ] -VOLUME_000000 = "000000" -VOLUME_222222 = "222222" -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" -DEFAULT_DATA_SET_NAME_WITH_MEMBER = "USER.PRIVATE.TESTDS(TESTME)" TEMP_PATH = "/tmp/jcl" ECHO_COMMAND = "echo {0} > {1}/SAMPLE" @@ -47,16 +45,16 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME(USER.PRIVATE.TESTDS) - + DEFINE CLUSTER (NAME({1}) - INDEXED - KEYS(6 1) - RECSZ(80 80) - TRACKS(1,1) - CISZ(4096) - FREESPACE(3 3) - - VOLUMES(000000) ) - - DATA (NAME(USER.PRIVATE.TESTDS.DATA)) - - INDEX (NAME(USER.PRIVATE.TESTDS.INDEX)) + VOLUMES({0}) ) - + DATA (NAME({1}.DATA)) - + INDEX (NAME({1}.INDEX)) /* """ @@ -65,14 +63,14 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME('USER.PRIVATE.TESTDS') - + DEFINE CLUSTER (NAME('{1}') - NUMBERED - RECSZ(80 80) - TRACKS(1,1) - REUSE - FREESPACE(3 3) - - VOLUMES(000000) ) - - DATA (NAME('USER.PRIVATE.TESTDS.DATA')) + VOLUMES({0}) ) - + DATA (NAME('{1}.DATA')) /* """ @@ -81,14 +79,14 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME('USER.PRIVATE.TESTDS') - + DEFINE CLUSTER (NAME('{1}') - NONINDEXED - RECSZ(80 80) - TRACKS(1,1) - CISZ(4096) - FREESPACE(3 3) - - VOLUMES(000000) ) - - DATA (NAME('USER.PRIVATE.TESTDS.DATA')) + VOLUMES({0}) ) - + DATA (NAME('{1}.DATA')) /* """ @@ -97,12 +95,12 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME('USER.PRIVATE.TESTDS') - + DEFINE CLUSTER (NAME('{1}') - LINEAR - TRACKS(1,1) - CISZ(4096) - - VOLUMES(000000) ) - - DATA (NAME(USER.PRIVATE.TESTDS.DATA)) + VOLUMES({0}) ) - + DATA (NAME({1}.DATA)) /* """ @@ -113,9 +111,9 @@ //SYSPRINT DD SYSOUT=A //SYSIN DD * ALLOC - - DSNAME('USER.PRIVATE.TESTDS') 
- + DSNAME('{1}') - NEW - - VOL(000000) - + VOL({0}) - DSNTYPE(PDS) /* """ @@ -136,7 +134,6 @@ def retrieve_data_set_names(results): for result in results.contacted.values(): if len(result.get("names", [])) > 0: for name in result.get("names"): - if name.lower() != DEFAULT_DATA_SET_NAME.lower(): data_set_names.append(name) return data_set_names @@ -147,17 +144,21 @@ def print_results(results): @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): +def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_systems): + hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + dataset = get_tmp_ds_name(2, 2) try: - hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True, wait_time_s=30 ) @@ -169,22 +170,22 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): results = hosts.all.zos_job_output(job_id=submitted_job_id) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # verify first uncatalog was performed - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): 
assert result.get("changed") is True # verify second uncatalog shows uncatalog already performed - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is False # recatalog the data set results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is True # verify second catalog shows catalog already performed results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -192,23 +193,27 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): # clean up hosts.all.file(path=TEMP_PATH, state="absent") # Added volumes to force a catalog in case they were somehow uncataloged to avoid an duplicate on volume error - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=[VOLUME_000000, VOLUME_222222]) + hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): +def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_systems): + hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + dataset = get_tmp_ds_name(2, 2) try: - hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + 
name=dataset, state="cataloged", volumes=volume_1 ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True ) @@ -217,39 +222,43 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 + name=dataset, state="present", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is False # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 + name=dataset, state="present", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) + hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): +def test_data_set_replacement_when_uncataloged(ansible_zos_module, 
jcl, volumes_on_systems): + hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() + dataset = get_tmp_ds_name(2, 2) try: - hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True ) @@ -258,42 +267,46 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 + name=dataset, state="present", volumes=volume ) for result in results.contacted.values(): assert result.get("changed") is False # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=dataset, state="present", - volumes=VOLUME_000000, + volumes=volume, replace=True, ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, 
ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): +def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_systems): try: + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True ) @@ -301,32 +314,37 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # ensure data set absent results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000 + name=dataset, state="absent", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, 
RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl): +def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000) + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) # verify data set creation was successful @@ -334,15 +352,14 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # Create the same dataset name in different volume - jcl = jcl.replace(VOLUME_000000, VOLUME_222222) hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) results = 
hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) # verify data set creation was successful @@ -352,11 +369,10 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="absent") # ensure data set absent - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) + results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) for result in results.contacted.values(): assert result.get("changed") is True - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") for result in results.contacted.values(): assert result.get("changed") is True @@ -365,59 +381,63 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans def test_data_set_creation_when_present_no_replace(ansible_zos_module, dstype): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype, replace=True + name=dataset, state="present", type=dstype, replace=True ) results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype + name=dataset, state="present", type=dstype ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is False assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize("dstype", data_set_types) def test_data_set_creation_when_present_replace(ansible_zos_module, dstype): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype, replace=True + name=dataset, state="present", type=dstype, replace=True ) 
results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype, replace=True + name=dataset, state="present", type=dstype, replace=True ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize("dstype", data_set_types) def test_data_set_creation_when_absent(ansible_zos_module, dstype): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="absent") results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype + name=dataset, state="present", type=dstype ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize("dstype", data_set_types) def test_data_set_deletion_when_present(ansible_zos_module, dstype): hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=dstype) - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="present", type=dstype) + results = hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -425,8 +445,9 @@ def 
test_data_set_deletion_when_present(ansible_zos_module, dstype): def test_data_set_deletion_when_absent(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="absent") + results = hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is False assert result.get("module_stderr") is None @@ -435,40 +456,42 @@ def test_data_set_deletion_when_absent(ansible_zos_module): def test_batch_data_set_creation_and_deletion(ansible_zos_module): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": DEFAULT_DATA_SET_NAME, "state": "absent"}, - {"name": DEFAULT_DATA_SET_NAME, "type": "pds", "state": "present"}, - {"name": DEFAULT_DATA_SET_NAME, "state": "absent"}, + {"name": dataset, "state": "absent"}, + {"name": dataset, "type": "pds", "state": "present"}, + {"name": dataset, "state": "absent"}, ] ) for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") def test_batch_data_set_and_member_creation(ansible_zos_module): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": DEFAULT_DATA_SET_NAME, "type": "pds", "directory_blocks": 5}, - {"name": DEFAULT_DATA_SET_NAME + "(newmem1)", "type": "member"}, + {"name": dataset, "type": "pds", "directory_blocks": 5}, + {"name": dataset + "(newmem1)", "type": "member"}, { - "name": DEFAULT_DATA_SET_NAME + "(newmem2)", + "name": dataset + "(newmem2)", "type": "member", "state": "present", }, - {"name": DEFAULT_DATA_SET_NAME, "state": 
"absent"}, + {"name": dataset, "state": "absent"}, ] ) for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") c_pgm="""#include <stdio.h> @@ -499,7 +522,7 @@ def test_data_member_force_delete(ansible_zos_module): MEMBER_1, MEMBER_2, MEMBER_3, MEMBER_4 = "MEM1", "MEM2", "MEM3", "MEM4" try: hosts = ansible_zos_module - + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) # set up: # create pdse results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) @@ -611,6 +634,8 @@ def test_data_member_force_delete(ansible_zos_module): def test_repeated_operations(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) + DEFAULT_DATA_SET_NAME_WITH_MEMBER = DEFAULT_DATA_SET_NAME + "(MEM)" results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, type="PDS", @@ -669,9 +694,13 @@ def test_repeated_operations(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module): +def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, @@ -679,7 +708,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) space_primary=5, space_type="CYL", record_length=15, - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert 
result.get("changed") is True @@ -693,7 +722,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -702,9 +731,13 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): +def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, @@ -713,7 +746,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): key_offset=0, space_primary=5, space_type="CYL", - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -727,7 +760,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -736,16 +769,19 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_data_set_old_aliases(ansible_zos_module): +def test_data_set_old_aliases(ansible_zos_module, volumes_on_systems): + volumes = 
Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", format="fb", size="5m", - volume=VOLUME_000000, + volume=volume_1, ) for result in results.contacted.values(): assert result.get("changed") is True @@ -779,6 +815,7 @@ def test_data_set_temp_data_set_name(ansible_zos_module): def test_data_set_temp_data_set_name_batch(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name() hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( batch=[ @@ -791,14 +828,18 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): dict( state="present", ), - dict(name=DEFAULT_DATA_SET_NAME, state="present"), + dict( + name=DEFAULT_DATA_SET_NAME, + state="present" + ), ] ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") data_set_names = retrieve_data_set_names(results) - assert len(data_set_names) == 3 + assert len(data_set_names) == 4 for name in data_set_names: - results2 = hosts.all.zos_data_set(name=name, state="absent") + if name != DEFAULT_DATA_SET_NAME: + results2 = hosts.all.zos_data_set(name=name, state="absent") for result in results2.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -819,7 +860,7 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): def test_filesystem_create_and_mount(ansible_zos_module, filesystem): fulltest = True hosts = ansible_zos_module - + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(1, 1) try: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @@ -876,6 +917,7 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): def test_data_set_creation_zero_values(ansible_zos_module): try: hosts = 
ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", @@ -895,15 +937,16 @@ def test_data_set_creation_zero_values(ansible_zos_module): def test_data_set_creation_with_tmp_hlq(ansible_zos_module): try: - tmphlq = "TMPHLQ" + tmphlq = "ANSIBLE" hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set(state="present", tmp_hlq=tmphlq) dsname = None for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None for dsname in result.get("names"): - assert dsname[:6] == tmphlq + assert dsname[:7] == tmphlq finally: if dsname: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @@ -912,16 +955,19 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): "formats", ["F","FB", "VB", "FBA", "VBA", "U"], ) -def test_data_set_f_formats(ansible_zos_module, formats): +def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", format=formats, size="5m", - volume=VOLUME_000000, + volume=volume_1, ) for result in results.contacted.values(): assert result.get("changed") is True diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 7b7952387..5d58f2435 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -15,12 +15,13 @@ from shellescape import quote from pprint import pprint from os import path +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type -USS_FILE = "/tmp/encode.data" 
+USS_FILE = "/tmp/encode_data" USS_NONE_FILE = "/tmp/none" -USS_DEST_FILE = "/tmp/converted.data" +USS_DEST_FILE = "/tmp/converted_data" USS_PATH = "/tmp/src" USS_DEST_PATH = "/tmp/dest" MVS_PS = "encode.ps" @@ -48,18 +49,17 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DELETE ENCODE.TEST.VS SET MAXCC=0 DEFINE CLUSTER - - (NAME(ENCODE.TEST.VS) - + (NAME({0}) - INDEXED - KEYS(4 0) - RECSZ(80 80) - RECORDS(100) - SHAREOPTIONS(2 3) - VOLUMES(000000) ) - - DATA (NAME(ENCODE.TEST.VS.DATA)) - - INDEX (NAME(ENCODE.TEST.VS.INDEX)) + DATA (NAME({0}.DATA)) - + INDEX (NAME({0}.INDEX)) /* """ @@ -77,25 +77,62 @@ /* """ +VSAM_RECORDS = """00000001A record +00000002A record +00000003A record +""" + +def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): + """Creates a new VSAM on the system. + + Arguments: + hosts (object) -- Ansible instance(s) that can call modules. + name (str) -- Name of the VSAM data set. + type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + add_data (bool, optional) -- Whether to add records to the VSAM. + key_length (int, optional) -- Key length (only for KSDS data sets). + key_offset (int, optional) -- Key offset (only for KSDS data sets). 
+ """ + params = dict( + name=name, + type=ds_type, + state="present" + ) + if ds_type == "KSDS": + params["key_length"] = key_length + params["key_offset"] = key_offset + + hosts.all.zos_data_set(**params) + + if add_data: + record_src = "/tmp/zos_copy_vsam_record" + + hosts.all.shell(cmd="echo {0} >> {1}".format(quote(VSAM_RECORDS), record_src)) + hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + hosts.all.file(path=record_src, state="absent") def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_encode( - src=USS_FILE, - encoding={ - "from": INVALID_ENCODING, - "to": TO_ENCODING, - }, - ) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("msg") is not None - assert result.get("backup_name") is None - assert result.get("changed") is False + try: + hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + results = hosts.all.zos_encode( + src=USS_FILE, + encoding={ + "from": INVALID_ENCODING, + "to": TO_ENCODING, + }, + ) + for result in results.contacted.values(): + assert result.get("msg") is not None + assert result.get("backup_name") is None + assert result.get("changed") is False + finally: + hosts.all.file(path=USS_FILE, state="absent") def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): hosts = ansible_zos_module + hosts.all.copy(content=TEST_DATA, dest=USS_FILE) results = hosts.all.zos_encode( src=USS_FILE, encoding={ @@ -108,6 +145,7 @@ def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): assert result.get("msg") is not None assert result.get("backup_name") is None assert result.get("changed") is False + hosts.all.file(path=USS_FILE, state="absent") def test_uss_encoding_conversion_without_dest(ansible_zos_module): @@ -165,6 +203,8 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): def 
test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() + MVS_NONE_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") hosts.all.zos_data_set(name=MVS_NONE_PS, state="absent") @@ -182,6 +222,8 @@ def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): assert result.get("dest") == MVS_NONE_PS assert result.get("backup_name") is None assert result.get("changed") is False + hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=MVS_NONE_PS, state="absent") def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): @@ -197,7 +239,6 @@ def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == USS_DEST_FILE @@ -256,7 +297,6 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_PATH assert result.get("dest") == USS_DEST_PATH @@ -277,6 +317,7 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") results = hosts.all.zos_encode( @@ -287,7 +328,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_PS @@ -295,11 +335,15 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): 
assert result.get("changed") is True finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_PS, state="absent") def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() + hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") + hosts.all.copy(content=TEST_DATA, dest=MVS_PS) hosts.all.copy(content="test", dest=USS_DEST_FILE) results = hosts.all.zos_encode( src=MVS_PS, @@ -310,7 +354,6 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PS assert result.get("dest") == USS_DEST_FILE @@ -323,11 +366,13 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") + hosts.all.zos_data_set(name=MVS_PS, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_encode( @@ -338,7 +383,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_PDS @@ -346,16 +390,19 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() + 
MVS_PDS_MEMBER = MVS_PDS + '(MEM)' hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_data_set( name=MVS_PDS_MEMBER, type="member", state="present" ) - pprint(vars(results)) for result in results.contacted.values(): # documentation will return changed=False if ds exists and replace=False.. # assert result.get("changed") is True @@ -368,7 +415,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_PDS_MEMBER @@ -376,11 +422,19 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS_MEMBER, type="member", state="present" + ) + hosts.all.copy(content=TEST_DATA, dest=MVS_PDS_MEMBER) hosts.all.copy(content="test", dest=USS_DEST_FILE) results = hosts.all.zos_encode( src=MVS_PDS_MEMBER, @@ -391,7 +445,6 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PDS_MEMBER assert result.get("dest") == USS_DEST_FILE @@ -404,11 +457,13 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), 
state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.file(path=USS_PATH, state="directory") hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode1") hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode2") @@ -421,19 +476,11 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_PATH assert result.get("dest") == MVS_PDS assert result.get("backup_name") is None assert result.get("changed") is True - finally: - hosts.all.file(path=USS_PATH, state="absent") - - -def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): - try: - hosts = ansible_zos_module hosts.all.file(path=USS_DEST_PATH, state="directory") results = hosts.all.zos_encode( src=MVS_PDS, @@ -443,8 +490,8 @@ def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): + assert result.get("src") == MVS_PDS assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None @@ -455,11 +502,22 @@ def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): assert FROM_ENCODING in result.get("stdout") assert "untagged" not in result.get("stdout") finally: + hosts.all.file(path=USS_PATH, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.file(path=USS_DEST_PATH, state="absent") def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + MVS_PS = get_tmp_ds_name() + hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") + hosts.all.shell(cmd="cp {0} \"//'{1}'\" ".format(quote(TEST_DATA), MVS_PS)) + 
hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") + hosts.all.zos_data_set( + name=MVS_PDS_MEMBER, type="member", state="present" + ) results = hosts.all.zos_encode( src=MVS_PS, dest=MVS_PDS_MEMBER, @@ -468,21 +526,23 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): + print(result) assert result.get("src") == MVS_PS assert result.get("dest") == MVS_PDS_MEMBER assert result.get("backup_name") is None assert result.get("changed") is True - + hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): try: hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name(3) hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True @@ -500,7 +560,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_VS @@ -509,12 +568,16 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_VS, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.copy(content="test", dest=USS_DEST_FILE) + mlq_size = 3 + MVS_VS = get_tmp_ds_name(mlq_size) + create_vsam_data_set(hosts, MVS_VS, "KSDS", 
add_data=True, key_length=12, key_offset=0) + hosts.all.file(path=USS_DEST_FILE, state="touch") results = hosts.all.zos_encode( src=MVS_VS, dest=USS_DEST_FILE, @@ -524,7 +587,6 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_VS assert result.get("dest") == USS_DEST_FILE @@ -542,10 +604,14 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") + hosts.all.zos_data_set(name=MVS_VS, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() + MVS_VS = get_tmp_ds_name() + create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_encode( @@ -556,16 +622,25 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_VS assert result.get("dest") == MVS_PS assert result.get("backup_name") is None assert result.get("changed") is True + hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.zos_data_set(name=MVS_PS, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name() + MVS_PDS = get_tmp_ds_name() + create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + 
name=MVS_PDS_MEMBER, type="member", state="present" + ) results = hosts.all.zos_encode( src=MVS_VS, dest=MVS_PDS_MEMBER, @@ -575,31 +650,35 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): }, ) hosts.all.zos_data_set(name=MVS_PDS, state="absent") - pprint(vars(results)) for result in results.contacted.values(): + print(result) assert result.get("src") == MVS_VS assert result.get("dest") == MVS_PDS_MEMBER assert result.get("backup_name") is None assert result.get("changed") is True + hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): try: hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name(3) + MVS_PS = get_tmp_ds_name() + hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True ) - print("test_uss_encoding_conversion_mvs_ps_to_mvs_vsam") - pprint(vars(results)) for result in results.contacted.values(): assert result.get("jobs") is not None assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + #hosts.all.zos_copy(content=TEST_DATA, dest=MVS_PS) results = hosts.all.zos_encode( src=MVS_PS, dest=MVS_VS, @@ -608,7 +687,6 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PS assert result.get("dest") == MVS_VS @@ -617,11 +695,13 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): 
finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=MVS_VS, state="absent") def test_pds_backup(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") @@ -649,6 +729,7 @@ def test_pds_backup(ansible_zos_module): def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() tmphlq = "TMPHLQ" hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="absent") @@ -682,6 +763,7 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): def test_ps_backup(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") @@ -708,6 +790,8 @@ def test_ps_backup(ansible_zos_module): def test_vsam_backup(ansible_zos_module): try: hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name() + MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_VS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") @@ -716,7 +800,7 @@ def test_vsam_backup(ansible_zos_module): ) hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True @@ -741,12 +825,6 @@ def test_vsam_backup(ansible_zos_module): "to": TO_ENCODING, }, ) - contents = 
hosts.all.shell(cmd="cat \"//'{0}'\"".format(MVS_PS)) - content1 = "" - hosts.all.zos_data_set(name=MVS_PS, state="absent") - for content in contents.contacted.values(): - content1 = content.get("stdout") - print(contents.contacted.values()) hosts.all.zos_encode( src=MVS_VS, encoding={ @@ -767,13 +845,6 @@ def test_vsam_backup(ansible_zos_module): "to": TO_ENCODING, }, ) - - contents = hosts.all.shell(cmd="cat \"//'{0}'\"".format(MVS_PS)) - content2 = "" - print(contents.contacted.values()) - for content in contents.contacted.values(): - content2 = content.get("stdout") - assert content1 and (content1 == content2) finally: hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_VS, state="absent") @@ -784,6 +855,7 @@ def test_vsam_backup(ansible_zos_module): def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") # create and fill PDS hosts.all.zos_data_set(name=MVS_PDS, state="absent") @@ -844,10 +916,11 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) def test_uss_backup_entire_folder_to_default_backup_location_compressed( - ansible_zos_module, + ansible_zos_module ): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") # create and fill PDS hosts.all.zos_data_set(name=MVS_PDS, state="absent") @@ -895,6 +968,7 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") @@ -930,4 +1004,4 @@ def 
test_return_backup_name_on_module_success_and_failure(ansible_zos_module): finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") \ No newline at end of file diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 3b4a9c371..357540876 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -21,6 +21,9 @@ from ansible.utils.hashing import checksum from shellescape import quote +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + __metaclass__ = type @@ -29,12 +32,6 @@ DUMMY DATA == LINE 03 == """ - -TEST_PS = "USER.PRIV.TEST" -TEST_PS_VB = "USER.PRIV.PSVB" -TEST_PDS = "USER.PRIV.TESTPDS" -TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(MEM1)" -TEST_VSAM = "FETCH.TEST.VS" FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" USS_FILE = "/tmp/fetch.data" @@ -48,18 +45,17 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DELETE FETCH.TEST.VS SET MAXCC=0 DEFINE CLUSTER - - (NAME(FETCH.TEST.VS) - + (NAME({1}) - INDEXED - KEYS(4 0) - RECSZ(200 200) - RECORDS(100) - SHAREOPTIONS(2 3) - - VOLUMES(000000) ) - - DATA (NAME(FETCH.TEST.VS.DATA)) - - INDEX (NAME(FETCH.TEST.VS.INDEX)) + VOLUMES({0}) ) - + DATA (NAME({1}.DATA)) - + INDEX (NAME({1}.INDEX)) /* """ KSDS_REPRO_JCL = """//DOREPRO JOB (T043JM,JM00,1,0,0,0),'CREATE KSDS',CLASS=R, @@ -90,21 +86,21 @@ def extract_member_name(data_set): member += data_set[i] return member -def create_and_populate_test_ps_vb(ansible_zos_module): +def create_and_populate_test_ps_vb(ansible_zos_module, name): params=dict( - name=TEST_PS_VB, + name=name, type='SEQ', record_format='VB', record_length='3180', block_size='3190' ) 
ansible_zos_module.all.zos_data_set(**params) - ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS_VB)) + ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, name)) -def delete_test_ps_vb(ansible_zos_module): +def delete_test_ps_vb(ansible_zos_module, name): params=dict( - name=TEST_PS_VB, + name=name, state='absent' ) ansible_zos_module.all.zos_data_set(**params) @@ -191,6 +187,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) @@ -211,7 +208,8 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): def test_fetch_sequential_data_set_variable_block(ansible_zos_module): hosts = ansible_zos_module - create_and_populate_test_ps_vb(ansible_zos_module) + TEST_PS_VB = get_tmp_ds_name(3) + create_and_populate_test_ps_vb(ansible_zos_module, TEST_PS_VB) params = dict(src=TEST_PS_VB, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS_VB try: @@ -225,12 +223,14 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) - delete_test_ps_vb(ansible_zos_module) + delete_test_ps_vb(ansible_zos_module, TEST_PS_VB) def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True) @@ -250,30 +250,33 @@ def 
test_fetch_partitioned_data_set(ansible_zos_module): shutil.rmtree(dest_path) -def test_fetch_vsam_data_set(ansible_zos_module): +def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - TEMP_JCL_PATH = "/tmp/ansible" - dest_path = "/tmp/" + TEST_VSAM + temp_jcl_path = "/tmp/ansible" + test_vsam = get_tmp_ds_name() + dest_path = "/tmp/" + test_vsam + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: # start by creating the vsam dataset (could use a helper instead? ) - hosts.all.file(path=TEMP_JCL_PATH, state="directory") + hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait=True ) hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( src=USS_FILE, - dest=TEST_VSAM, + dest=test_vsam, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) - params = dict(src=TEST_VSAM, dest="/tmp/", flat=True, is_binary=True) + params = dict(src=test_vsam, dest="/tmp/", flat=True, is_binary=True) results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is True @@ -291,7 +294,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): None os.remove(dest_path) hosts.all.file(path=USS_FILE, state="absent") - hosts.all.file(path=TEMP_JCL_PATH, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") def test_fetch_vsam_empty_data_set(ansible_zos_module): @@ -316,7 +319,9 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = 
get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PDS, state="present") + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict( @@ -341,6 +346,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) @@ -361,7 +367,9 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) @@ -383,7 +391,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_empty(ansible_zos_module): hosts = ansible_zos_module - src = "USER.TEST.EMPTY.SEQ" + src = get_tmp_ds_name() params = dict(src=src, dest="/tmp/", flat=True) dest_path = "/tmp/" + src try: @@ -404,7 +412,7 @@ def test_fetch_sequential_data_set_empty(ansible_zos_module): def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): hosts = ansible_zos_module - pds_name = "ZOS.FETCH.TEST.PDS" + pds_name = get_tmp_ds_name() hosts.all.zos_data_set( name=pds_name, type="pds", @@ -425,7 +433,7 @@ def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): def 
test_fetch_partitioned_data_set_member_empty(ansible_zos_module): hosts = ansible_zos_module - pds_name = "ZOS.FETCH.TEST.PDS" + pds_name = get_tmp_ds_name() hosts.all.zos_data_set( name=pds_name, type="pds", @@ -434,8 +442,9 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): record_format="fba", record_length=25, ) + hosts.all.zos_data_set(name=pds_name, type="pds") hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes") - params = dict(src="ZOS.FETCH.TEST.PDS(MYDATA)", dest="/tmp/", flat=True) + params = dict(src=pds_name + "(MYDATA)", dest="/tmp/", flat=True) dest_path = "/tmp/MYDATA" try: results = hosts.all.zos_fetch(**params) @@ -482,8 +491,9 @@ def test_fetch_missing_uss_file_fails(ansible_zos_module): def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): hosts = ansible_zos_module + src = get_tmp_ds_name() params = dict( - src="FETCH.TEST.DATA.SET", dest="/tmp/", flat=True, fail_on_missing=False + src=src, dest="/tmp/", flat=True, fail_on_missing=False ) try: results = hosts.all.zos_fetch(**params) @@ -498,6 +508,7 @@ def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): def test_fetch_partitioned_data_set_member_missing_fails(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() params = dict(src=TEST_PDS + "(DUMMY)", dest="/tmp/", flat=True) try: results = hosts.all.zos_fetch(**params) @@ -510,7 +521,8 @@ def test_fetch_partitioned_data_set_member_missing_fails(ansible_zos_module): def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): hosts = ansible_zos_module - params = dict(src="ZOS.FETCH.TEST.PDS", dest="/tmp/", flat=True) + src = get_tmp_ds_name() + params = dict(src=src, dest="/tmp/", flat=True) try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -522,6 +534,7 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def 
test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") @@ -546,7 +559,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module - pds_name = "ZOS.FETCH.TEST.PDS" + pds_name = get_tmp_ds_name() dest_path = "/tmp/" + pds_name full_path = dest_path + "/MYDATA" hosts.all.zos_data_set( @@ -596,6 +609,7 @@ def test_fetch_uss_file_insufficient_write_permission_fails(ansible_zos_module): def test_fetch_pds_dir_insufficient_write_permission_fails(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() dest_path = "/tmp/" + TEST_PDS os.mkdir(dest_path) os.chmod(dest_path, stat.S_IREAD) @@ -611,12 +625,14 @@ def test_fetch_pds_dir_insufficient_write_permission_fails(ansible_zos_module): def test_fetch_use_data_set_qualifier(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/TEST.USER.QUAL" - hosts.all.zos_data_set(name="OMVSADM.TEST.USER.QUAL", type="seq", state="present") - params = dict(src="TEST.USER.QUAL", dest="/tmp/", flat=True, use_qualifier=True) + src = get_tmp_ds_name()[:25] + dest_path = "/tmp/"+ src + hosts.all.zos_data_set(name="OMVSADM." 
+ src, type="seq", state="present") + params = dict(src=src, dest="/tmp/", flat=True, use_qualifier=True) try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): + print(result) assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None @@ -624,7 +640,7 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) - hosts.all.zos_data_set(src="OMVSADM.TEST.USER.QUAL", state="absent") + hosts.all.zos_data_set(src="OMVSADM." + src, state="absent") def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 79df4efac..50782be0b 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -14,6 +14,8 @@ __metaclass__ = type +from ibm_zos_core.tests.helpers.volumes import Volume_Handler + SEQ_NAMES = [ "TEST.FIND.SEQ.FUNCTEST.FIRST", "TEST.FIND.SEQ.FUNCTEST.SECOND", @@ -280,15 +282,18 @@ def test_find_vsam_pattern(ansible_zos_module): ) -def test_find_vsam_in_volume(ansible_zos_module): +def test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - alternate_vsam = "TEST.FIND.ALTER.VSAM" + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() + alternate_vsam = "TEST.FIND.VSAM.SECOND" try: for vsam in VSAM_NAMES: - create_vsam_ksds(vsam, hosts, volume="222222") - create_vsam_ksds(alternate_vsam, hosts, volume="000000") + create_vsam_ksds(vsam, hosts, volume=volume_1) + create_vsam_ksds(alternate_vsam, hosts, volume=volume_2) find_res = hosts.all.zos_find( - patterns=['TEST.FIND.*.*.*'], volumes=['222222'], resource_type='cluster' + patterns=['TEST.FIND.*.*.*'], volumes=[volume_1], resource_type='cluster' ) for val in 
find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -355,4 +360,4 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): finally: hosts.all.zos_data_set( batch=[dict(name=i, state='absent') for i in PDS_NAMES] - ) + ) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index b7c412cd4..c0dc5bdca 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -23,6 +23,7 @@ from shellescape import quote import tempfile +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # Make sure job list * returns something def test_zos_job_query_func(ansible_zos_module): @@ -45,14 +46,12 @@ def test_zos_job_query_func(ansible_zos_module): """ TEMP_PATH = "/tmp/jcl" -JDATA_SET_NAME = "imstestl.ims1.testq1" -NDATA_SET_NAME = "imstestl.ims1.testq2" -DEFAULT_VOLUME = "000000" # test to show multi wildcard in Job_id query won't crash the search def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module + JDATA_SET_NAME = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) @@ -85,6 +84,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module + NDATA_SET_NAME = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 44dfdbf01..0fe6a59b9 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -20,8 +20,9 @@ import pytest import re 
import os -from pprint import pprint +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # ############################################################################## # Configure the job card as needed, most common keyword parameters: @@ -259,25 +260,24 @@ //STEP1 EXEC PGM=BPXBATCH,PARM='PGM /bin/sleep 5'""" TEMP_PATH = "/tmp/jcl" -DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" -DEFAULT_VOLUME = "000000" def test_job_submit_PDS(ansible_zos_module): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(DATA_SET_NAME), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait=True ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -285,7 +285,7 @@ def test_job_submit_PDS(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_special_characters(ansible_zos_module): @@ -374,9 +374,12 @@ def test_job_submit_LOCAL_BADJCL(ansible_zos_module): assert re.search(r'completion code', repr(result.get("msg"))) -def test_job_submit_PDS_volume(ansible_zos_module): +def test_job_submit_PDS_volume(ansible_zos_module, 
volumes_on_systems): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( @@ -384,30 +387,31 @@ def test_job_submit_PDS_volume(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True, volumes=DEFAULT_VOLUME + name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="uncataloged", type="pds" + name=data_set_name, state="uncataloged", type="pds" ) - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(SAMPLE)", location="DATA_SET", volume=DEFAULT_VOLUME) + results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get('changed') is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") wait_time_s = 15 @@ -416,15 +420,15 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/BPXSLEEP 
\"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) ) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(BPXSLEEP)", + results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", location="DATA_SET", wait_time_s=wait_time_s) for result in results.contacted.values(): @@ -434,12 +438,13 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): assert result.get('duration') <= wait_time_s finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") wait_time_s = 60 @@ -448,15 +453,15 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) ) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(BPXSLEEP)", + results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", location="DATA_SET", wait_time_s=wait_time_s) for result in results.contacted.values(): @@ -466,12 +471,13 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): assert result.get('duration') <= wait_time_s finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): """This submits a 30 second job and only waits 10 seconds""" try: hosts = ansible_zos_module + data_set_name = 
get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") wait_time_s = 10 @@ -480,15 +486,15 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) ) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(BPXSLEEP)", + results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", location="DATA_SET", wait_time_s=wait_time_s) for result in results.contacted.values(): @@ -499,7 +505,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): assert re.search(r'exceeded', repr(result.get("msg"))) finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") @pytest.mark.parametrize("args", [ diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index e415a76e8..256a21c71 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -18,9 +18,10 @@ import pytest import inspect +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + __metaclass__ = type -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" TEST_FOLDER_LINEINFILE = "/tmp/ansible-core-tests/zos_lineinfile/" c_pgm="""#include <stdio.h> @@ -549,9 +550,8 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST1" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + 
"." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -572,9 +572,8 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") - test_name = "DST2" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -595,9 +594,8 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") - test_name = "DST3" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -617,9 +615,8 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") - test_name = "DST4" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -640,9 +637,8 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST5" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -663,9 +659,8 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") - test_name = "DST6" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -686,9 +681,8 @@ def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST7" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -709,9 +703,8 @@ def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") - test_name = "DST8" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -732,9 +725,8 @@ def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype) hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST9" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -755,9 +747,8 @@ def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") - test_name = "DST10" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -778,9 +769,8 @@ def test_ds_line_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="ZOAU_ROOT=", line="", state="absent") - test_name = "DST11" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -802,17 +792,10 @@ def test_ds_tmp_hlq_option(ansible_zos_module): ds_type = "SEQ" kwargs = dict(backup_name=r"TMPHLQ\..") params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") - test_name = "DST12" - temp_file = "/tmp/zos_lineinfile/" + test_name content = TEST_CONTENT try: - hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - if len(hlq) > 8: - hlq = hlq[:8] - ds_full_name = hlq + "." + test_name.upper() + "." + ds_type + ds_full_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_full_name hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) @@ -837,14 +820,8 @@ def test_ds_not_supported(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST13" - ds_name = test_name.upper() + "." 
+ ds_type try: - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + "." + ds_type results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') for result in results.contacted.values(): assert result.get("changed") is True @@ -862,21 +839,22 @@ def test_ds_not_supported(ansible_zos_module, dstype): def test_ds_line_force(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="True") MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT if ds_type == "SEQ": - params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name+"({0})".format(MEMBER_2) try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -895,7 +873,7 @@ def test_ds_line_force(ansible_zos_module, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( call_c_jcl.format( - DEFAULT_DATA_SET_NAME, + default_data_set_name, MEMBER_1), 
'/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") @@ -914,7 +892,7 @@ def test_ds_line_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds @@ -922,18 +900,19 @@ def test_ds_line_force(ansible_zos_module, dstype): def test_ds_line_force_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="False") MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name + "({0})".format(MEMBER_2) content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -949,7 +928,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( call_c_jcl.format( - DEFAULT_DATA_SET_NAME, + default_data_set_name, MEMBER_1), '/tmp/disp_shr/call_c_pgm.jcl')) 
hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") @@ -965,7 +944,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds @@ -974,9 +953,8 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") - test_name = "DST15" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1033,9 +1011,8 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): insert_data = "Insert this string" params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) params["encoding"] = encoding - test_name = "DST13" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." 
+ ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 4021af625..8883ddebc 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -17,6 +17,9 @@ MissingZOAUImport, ) +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + try: from zoautil_py import Datasets except Exception: @@ -66,7 +69,7 @@ def populate_tmpfile(): return tmp_file_filename -def create_sourcefile(hosts): +def create_sourcefile(hosts, volume): starter = get_sysname(hosts).split(".")[0].upper() if len(starter) < 2: starter = "IMSTESTU" @@ -83,7 +86,7 @@ def create_sourcefile(hosts): hosts.all.shell( cmd="zfsadm define -aggregate " + thisfile - + " -volumes 222222 -cylinders 200 1", + + " -volumes {0} -cylinders 200 1".format(volume), executable=SHELL_EXECUTABLE, stdin="", ) @@ -95,9 +98,11 @@ def create_sourcefile(hosts): return thisfile -def test_basic_mount(ansible_zos_module): +def test_basic_mount(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" @@ -116,9 +121,12 @@ def test_basic_mount(ansible_zos_module): hosts.all.file(path="/pythonx/", state="absent") -def test_double_mount(ansible_zos_module): + +def test_double_mount(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = 
create_sourcefile(hosts, volume_1) try: hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") # The duplication here is intentional... want to make sure it is seen @@ -139,9 +147,11 @@ def test_double_mount(ansible_zos_module): hosts.all.file(path="/pythonx/", state="absent") -def test_remount(ansible_zos_module): +def test_remount(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") mount_result = hosts.all.zos_mount( @@ -160,9 +170,11 @@ def test_remount(ansible_zos_module): hosts.all.file(path="/pythonx/", state="absent") -def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module): +def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) tmp_file_filename = "/tmp/testfile.txt" @@ -177,8 +189,8 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module): stdin="", ) - dest = "USER.TEST.BPX.PDS" - dest_path = "USER.TEST.BPX.PDS(AUTO1)" + dest = get_tmp_ds_name() + dest_path = dest + "(AUTO1)" hosts.all.zos_data_set( name=dest, @@ -229,9 +241,11 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module): ) -def test_basic_mount_with_bpx_comment_backup(ansible_zos_module): +def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) tmp_file_filename = "/tmp/testfile.txt" @@ -258,9 +272,9 @@ def 
test_basic_mount_with_bpx_comment_backup(ansible_zos_module): print("\n====================================================\n") - dest = "USER.TEST.BPX.PDS" - dest_path = "USER.TEST.BPX.PDS(AUTO2)" - back_dest_path = "USER.TEST.BPX.PDS(AUTO2BAK)" + dest = get_tmp_ds_name() + dest_path = dest + "(AUTO2)" + back_dest_path = dest + "(AUTO2BAK)" hosts.all.zos_data_set( name=dest, @@ -347,10 +361,11 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module): record_length=80, ) - -def test_basic_mount_with_tmp_hlq_option(ansible_zos_module): +def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" @@ -361,7 +376,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module): assert result.get("changed") is True finally: tmphlq = "TMPHLQ" - persist_data_set = "MTEST.TEST.PERSIST" + persist_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=persist_data_set, state="present", type="SEQ") unmount_result = hosts.all.zos_mount( src=srcfn, diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index 72bf0bd0a..fd20a6a92 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -18,10 +18,11 @@ import pytest from pprint import pprint +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + +DATASET = "" EXISTING_DATA_SET = "user.private.proclib" -DEFAULT_DATA_SET = "user.private.rawds" -DEFAULT_DATA_SET_2 = "user.private.rawds2" -DEFAULT_DATA_SET_WITH_MEMBER = "{0}(mem1)".format(DEFAULT_DATA_SET) DEFAULT_PATH = "/tmp/testdir" DEFAULT_PATH_WITH_FILE = 
"{0}/testfile".format(DEFAULT_PATH) DEFAULT_DD = "MYDD" @@ -29,7 +30,6 @@ SYSPRINT_DD = "SYSPRINT" IDCAMS_STDIN = " LISTCAT ENTRIES('{0}')".format(EXISTING_DATA_SET.upper()) IDCAMS_INVALID_STDIN = " hello world #$!@%!#$!@``~~^$*%" -DEFAULT_VOLUME = "000000" # ---------------------------------------------------------------------------- # @@ -51,7 +51,8 @@ def test_failing_name_format(ansible_zos_module): def test_disposition_new(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -59,7 +60,7 @@ def test_disposition_new(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -73,7 +74,7 @@ def test_disposition_new(ansible_zos_module): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -83,8 +84,9 @@ def test_disposition_new(ansible_zos_module): def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -93,7 +95,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition=disposition, 
return_content=dict(type="text"), ), @@ -106,14 +108,17 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") -def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): +def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module tmphlq = "TMPHLQ" + volumes = Volume_Handler(volumes_on_systems) + default_volume = volumes.get_available_vol() + default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -123,7 +128,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", return_content=dict(type="text"), replace=True, @@ -132,7 +137,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): space_primary=5, space_secondary=1, space_type="m", - volumes=DEFAULT_VOLUME, + volumes=default_volume, record_format="fb" ), ), @@ -145,7 +150,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 for backup in result.get("backups"): backup.get("backup_name")[:6] == tmphlq - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") for result in results.contacted.values(): pprint(result) assert result.get("changed", False) is True @@ -155,7 +160,9 @@ def 
test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): def test_new_disposition_for_data_set_members(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -177,7 +184,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -187,8 +194,10 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposition): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="pds", state="present", replace=True + name=default_data_set, type="pds", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -210,22 +219,25 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "normal_disposition,changed", [("keep", True), ("delete", True), ("catalog", True), ("uncatalog", True)], ) -def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, changed): +def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, changed, volumes_on_systems): try: hosts = 
ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, + name=default_data_set, type="seq", state="present", replace=True, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,10 +246,10 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="shr", disposition_normal=normal_disposition, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], return_content=dict(type="text"), ), ), @@ -249,7 +261,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -265,7 +277,8 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -273,7 +286,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", space_primary=primary, @@ -286,7 +299,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte ], ) - results2 = hosts.all.command(cmd="dls -l -s 
{0}".format(DEFAULT_DATA_SET)) + results2 = hosts.all.command(cmd="dls -l -s {0}".format(default_data_set)) for result in results.contacted.values(): pprint(result) @@ -297,17 +310,20 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte pprint(result) assert str(expected) in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "data_set_type", ["pds", "pdse", "large", "basic", "seq"], ) -def test_data_set_types_non_vsam(ansible_zos_module, data_set_type): +def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -315,32 +331,35 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type=data_set_type, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), ], ) - results = hosts.all.command(cmd="dls {0}".format(DEFAULT_DATA_SET)) + results = hosts.all.command(cmd="dls {0}".format(default_data_set)) for result in results.contacted.values(): pprint(result) assert "BGYSC1103E" not in result.get("stderr", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "data_set_type", ["ksds", "rrds", "lds", "esds"], ) -def test_data_set_types_vsam(ansible_zos_module, data_set_type): +def 
test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -349,22 +368,22 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type=data_set_type, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ) if data_set_type != "ksds" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type=data_set_type, key_length=5, key_offset=0, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -372,22 +391,24 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type): ) # * we hope to see EDC5041I An error was detected at the system level when opening a file. 
# * because that means data set exists and is VSAM so we can't read it - results = hosts.all.command(cmd="head \"//'{0}'\"".format(DEFAULT_DATA_SET)) + results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set)) for result in results.contacted.values(): - pprint(result) assert "EDC5041I" in result.get("stderr", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "record_format", ["u", "vb", "vba", "fb", "fba"], ) -def test_record_formats(ansible_zos_module, record_format): +def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -395,23 +416,23 @@ def test_record_formats(ansible_zos_module, record_format): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", record_format=record_format, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), ], ) - results = hosts.all.command(cmd="dls -l {0}".format(DEFAULT_DATA_SET)) + results = hosts.all.command(cmd="dls -l {0}".format(default_data_set)) for result in results.contacted.values(): pprint(result) assert str(" {0} ".format(record_format.upper())) in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -424,15 +445,18 @@ def test_record_formats(ansible_zos_module, record_format): ), ], ) -def test_return_content_type(ansible_zos_module, 
return_content_type, expected): +def test_return_content_type(ansible_zos_module, return_content_type, expected, volumes_on_systems): try: hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, + name=default_data_set, type="seq", state="present", replace=True, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -441,9 +465,9 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="shr", - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], return_content=dict(type=return_content_type), ), ), @@ -457,7 +481,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected): assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent", volumes=[DEFAULT_VOLUME]) + hosts.all.zos_data_set(name=default_data_set, state="absent", volumes=[volume_1]) @pytest.mark.parametrize( @@ -472,16 +496,19 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected): ], ) def test_return_text_content_encodings( - ansible_zos_module, src_encoding, response_encoding, expected + ansible_zos_module, src_encoding, response_encoding, expected, volumes_on_systems ): try: + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, + name=default_data_set, type="seq", state="present", replace=True, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -490,9 
+517,9 @@ def test_return_text_content_encodings( dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="shr", - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], return_content=dict( type="text", src_encoding=src_encoding, @@ -509,14 +536,15 @@ def test_return_text_content_encodings( assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent", volumes=[DEFAULT_VOLUME]) + hosts.all.zos_data_set(name=default_data_set, state="absent", volumes=[volume_1]) def test_reuse_existing_data_set(ansible_zos_module): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -525,7 +553,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", reuse=True, @@ -541,14 +569,15 @@ def test_reuse_existing_data_set(ansible_zos_module): assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_replace_existing_data_set(ansible_zos_module): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -557,7 +586,7 @@ def test_replace_existing_data_set(ansible_zos_module): dict( dd_data_set=dict( 
dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -573,13 +602,14 @@ def test_replace_existing_data_set(ansible_zos_module): assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_replace_existing_data_set_make_backup(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_mvs_raw( program_name="IDCAMS", auth=True, @@ -587,7 +617,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -604,7 +634,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -629,13 +659,13 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): ) assert ( result.get("backups")[0].get("original_name").lower() - == DEFAULT_DATA_SET.lower() + == default_data_set.lower() ) for result in results2.contacted.values(): pprint(result) assert "IDCAMS" in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") # ---------------------------------------------------------------------------- # @@ -646,7 +676,8 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): def test_input_empty(ansible_zos_module): try: hosts = ansible_zos_module - 
hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -654,7 +685,7 @@ def test_input_empty(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -668,13 +699,14 @@ def test_input_empty(ansible_zos_module): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_input_large(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") contents = "" for i in range(50000): contents += "this is line {0}\n".format(i) @@ -685,7 +717,7 @@ def test_input_large(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -700,13 +732,14 @@ def test_input_large(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 assert len(result.get("dd_names", [{}])[0].get("content")) > 100000 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_input_provided_as_list(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") contents = [] for i in range(10): contents.append(IDCAMS_STDIN) @@ -717,7 +750,7 @@ def 
test_input_provided_as_list(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -732,7 +765,7 @@ def test_input_provided_as_list(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 assert len(result.get("dd_names", [{}])[0].get("content")) > 100 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -748,7 +781,8 @@ def test_input_provided_as_list(ansible_zos_module): def test_input_return_content_types(ansible_zos_module, return_content_type, expected): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -756,7 +790,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", ), @@ -776,7 +810,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -799,7 +833,8 @@ def test_input_return_text_content_encodings( ): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -807,7 +842,7 @@ def 
test_input_return_text_content_encodings( dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", ), @@ -831,7 +866,7 @@ def test_input_return_text_content_encodings( assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") # ---------------------------------------------------------------------------- # @@ -1302,7 +1337,9 @@ def test_dummy(ansible_zos_module): def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_2 = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1314,7 +1351,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dds=[ dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -1345,14 +1382,16 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="present", type="seq") + default_data_set = get_tmp_ds_name() + 
DEFAULT_DATA_SET_2 = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1364,7 +1403,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dds=[ dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -1403,7 +1442,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu ) assert ( result.get("backups")[0].get("original_name").lower() - == DEFAULT_DATA_SET.lower() + == default_data_set.lower() ) assert ( result.get("backups")[1].get("original_name").lower() @@ -1413,14 +1452,17 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") def test_concatenation_with_data_set_member(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="present", type="pds") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_2 = get_tmp_ds_name() + DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1467,13 +1509,14 @@ def test_concatenation_with_data_set_member(ansible_zos_module): pprint(result) assert "IDCAMS" in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, 
state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") -def test_concatenation_with_unix_dd_and_response(ansible_zos_module): +def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_2 = get_tmp_ds_name() hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") @@ -1520,7 +1563,7 @@ def test_concatenation_with_unix_dd_and_response(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") -def test_concatenation_with_unix_dd_and_response(ansible_zos_module): +def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") @@ -1617,7 +1660,7 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ), dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name="ANSIBLE.USER.PRIVATE.TEST", disposition="shr", return_content=dict(type="text"), ) @@ -1649,7 +1692,7 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): dds=[ dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name="ANSIBLE.USER.PRIVATE.TEST", disposition="shr", return_content=dict(type="text"), ) @@ -1693,7 +1736,7 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ), dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name="ANSIBLE.USER.PRIVATE.TEST", disposition="shr", return_content=dict(type="text"), ) @@ -1722,12 +1765,12 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_content): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="present", type="seq") + default_data_set = "ANSIBLE.USER.PRIVATE.TEST" + 
hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 2 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -1736,7 +1779,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co ) finally: hosts.all.file(name=DEFAULT_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") # ---------------------------------------------------------------------------- # @@ -1747,7 +1790,8 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co def test_authorized_program_run_unauthorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=False, @@ -1759,13 +1803,14 @@ def test_authorized_program_run_unauthorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 assert "BGYSC0236E" in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_unauthorized_program_run_authorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="DSPURX00", auth=True, @@ -1777,13 +1822,14 @@ def 
test_unauthorized_program_run_authorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 assert "BGYSC0215E" in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_authorized_program_run_authorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1802,13 +1848,14 @@ def test_authorized_program_run_authorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 1 assert "BGYSC0236E" not in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_unauthorized_program_run_unauthorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="IEFBR14", auth=False, @@ -1820,7 +1867,7 @@ def test_unauthorized_program_run_unauthorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 assert "BGYSC0215E" not in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_missing_program_name(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 1a4994800..9860e6d12 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -18,8 +18,8 @@ import ansible.constants import ansible.errors import 
ansible.utils +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name -DEFAULT_TEMP_DATASET="imstestl.ims1.temp.ps" def test_zos_tso_command_run_help(ansible_zos_module): hosts = ansible_zos_module @@ -49,8 +49,9 @@ def test_zos_tso_command_long_command_128_chars(ansible_zos_module): def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): hosts = ansible_zos_module + default_temp_dataset = get_tmp_ds_name() command_string = [ - "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(DEFAULT_TEMP_DATASET) + "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(default_temp_dataset) ] results_allocate = hosts.all.zos_tso_command(commands=command_string) # Validate the correct allocation of dataset @@ -59,34 +60,34 @@ def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): assert item.get("rc") == 0 assert result.get("changed") is True # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command - results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)]) + results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(default_temp_dataset)]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate LISTDS using alias param 'command' - results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)) + results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(default_temp_dataset)) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate LISTCAT command and an unauth command results = hosts.all.zos_tso_command( - commands=["LISTCAT ENT('{0}')".format(DEFAULT_TEMP_DATASET)] + commands=["LISTCAT ENT('{0}')".format(default_temp_dataset)] ) for 
result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate remove dataset - results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(default_temp_dataset)]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Expect the tso_command to fail here because the previous command will have already deleted the data set # Validate data set was removed by previous call - results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(default_temp_dataset)]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 8 diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 2faba0023..c0b1fe293 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -18,6 +18,7 @@ import pytest import tempfile from tempfile import mkstemp +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -27,10 +28,6 @@ f"{USS_TEMP_DIR}/bar.txt": "bar sample content", f"{USS_TEMP_DIR}/empty.txt":""} USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" -TEST_PS = "USER.PRIVATE.TESTDS" -TEST_PDS = "USER.PRIVATE.TESTPDS" -HLQ = "USER" -MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" USS_DEST_ARCHIVE = "testarchive.dzp" @@ -347,7 +344,7 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): """ - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -355,9 +352,9 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - 
dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( @@ -369,12 +366,15 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3) + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=DATASET, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -384,7 +384,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{DATASET}({member})", type="member", state="present" ) @@ -396,19 +396,19 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{DATASET}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{DATASET}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) if format == "terse": format_dict["format_options"] = dict(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), + src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, - dest_data_set=dict(name=data_set.get("name"), + 
dest_data_set=dict(name=DATASET, type="SEQ", record_format=record_format, record_length=record_length), @@ -417,12 +417,12 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + assert DATASET in result.get("archived") + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -431,7 +431,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec src=MVS_DEST_ARCHIVE, format=format_dict, remote_src=True, - dest_data_set=dict(name=data_set.get("name"), + dest_data_set=dict(name=DATASET, type=data_set.get("dstype"), record_format=record_format, record_length=record_length), @@ -442,19 +442,19 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec assert result.get("failed", False) is False # assert result.get("dest") == MVS_DEST_ARCHIVE # assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): - assert data_set.get("name") in c_result.get("stdout") + assert DATASET in c_result.get("stdout") # Check data integrity after unarchive cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") for result in cat_result.contacted.values(): assert result.get("stdout") == test_line finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + 
hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -462,9 +462,9 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( @@ -476,12 +476,15 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3) + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=DATASET, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -491,7 +494,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{DATASET}({member})", type="member", state="present" ) @@ -503,9 +506,9 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{DATASET}" else: - ds_to_write = 
f"{data_set.get('name')}({member})" + ds_to_write = f"{DATASET}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -513,7 +516,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), + src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -521,12 +524,12 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + assert DATASET in result.get("archived") + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -543,14 +546,14 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d assert result.get("failed", False) is False # assert result.get("dest") == MVS_DEST_ARCHIVE # assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): - assert data_set.get("name") in c_result.get("stdout") + assert DATASET in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( 
"format", [ "terse", @@ -558,16 +561,19 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQ ="ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -592,13 +598,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) - # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -615,16 +620,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) 
hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -632,16 +637,19 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQUA = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -666,7 +674,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -675,12 +683,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, assert result.get("failed", False) is False # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] # Unarchive action - include_ds = "{0}0".format(data_set.get("name")) + include_ds = "{0}0".format(DATASET) unarchive_result = hosts.all.zos_unarchive( src=MVS_DEST_ARCHIVE, format=format_dict, @@ -694,7 +702,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - 
cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQUA)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: if target_ds.get("name") == include_ds: @@ -704,10 +712,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, assert target_ds.get("name") not in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -715,16 +723,19 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQUA = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -749,18 +760,18 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) if 
format == "terse": del format_dict["format_options"]["terse_pack"] # Unarchive action - exclude_ds = "{0}0".format(data_set.get("name")) + exclude_ds = "{0}0".format(DATASET) unarchive_result = hosts.all.zos_unarchive( src=MVS_DEST_ARCHIVE, format=format_dict, @@ -773,7 +784,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd=""" dls "{0}.*" """.format(HLQUA)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: if target_ds.get("name") == exclude_ds: @@ -783,10 +794,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -794,16 +805,19 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -828,13 +842,13 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s 
format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -851,16 +865,16 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="""drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -868,9 +882,9 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) @pytest.mark.parametrize( @@ -886,8 +900,11 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f """ try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQUA = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=1, type=data_set.get("dstype")) 
ds_to_write = target_ds_list @@ -912,7 +929,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -933,7 +950,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQUA)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: assert target_ds.get("name") in result.get("targets") @@ -942,10 +959,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f assert result.get("changed") is False assert result.get("failed", False) is True finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -953,9 +970,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( @@ -967,13 +984,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format): try: 
hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3) + HLQ = "ANSIBLE" tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=DATASET, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -983,7 +1003,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{DATASET}({member})", type="member", state="present" ) @@ -995,9 +1015,9 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{DATASET}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{DATASET}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -1005,19 +1025,19 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), + src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, ) for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + assert DATASET in result.get("archived") + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result 
in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") # fetch archive data set into tmp folder fetch_result = hosts.all.zos_fetch(src=MVS_DEST_ARCHIVE, dest=tmp_folder.name, is_binary=True) @@ -1039,9 +1059,9 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da assert result.get("failed", False) is False # assert result.get("dest") == MVS_DEST_ARCHIVE # assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): - assert data_set.get("name") in c_result.get("stdout") + assert DATASET in c_result.get("stdout") # Check data integrity after unarchive cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") @@ -1050,7 +1070,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") tmp_folder.cleanup() @@ -1075,6 +1095,5 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): for result in unarchive_result.contacted.values(): assert result.get("changed") is False assert result.get("failed", False) is True - print(result) finally: tmp_folder.cleanup() diff --git a/tests/helpers/dataset.py b/tests/helpers/dataset.py new file mode 100644 index 000000000..c8050516a --- /dev/null +++ b/tests/helpers/dataset.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import pytest +import string +import random +import time +import re + +def get_tmp_ds_name(mlq_size=7, llq_size=7): + """ Function or test to ensure random names of datasets + the values of middle and last qualifier can change size by parameter, + but by default includes one letter.""" + ds = "ANSIBLE" + "." + ds += "P" + get_random_q(mlq_size).upper() + "." + ds += "T" + str(int(time.time()*1000))[-7:] + "." + ds += "C" + get_random_q(llq_size).upper() + return ds + + +def get_random_q(size=7): + """ Function or test to ensure random hlq of datasets""" + # Generate the first random hlq of size pass as parameter + letters = string.ascii_uppercase + string.digits + random_q = ''.join(random.choice(letters)for iteration in range(size)) + count = 0 + # Generate a random HLQ and verify if is valid, if not, repeat the process + while count < 5 and not re.fullmatch( + r"^(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})", + random_q, + re.IGNORECASE, + ): + random_q = ''.join(random.choice(letters)for iteration in range(size)) + count += 1 + return random_q \ No newline at end of file diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py new file mode 100644 index 000000000..b0ed97d30 --- /dev/null +++ b/tests/helpers/volumes.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import pytest +import time +import yaml + +class Volume: + """ Volume class represents a volume on the z system, it tracks if the volume name + and status of the volume with respect to the current test session.""" + def __init__(self, name): + self.name = name + self.in_use = False + + def __str__(self): + return f'The volume {self.name} is in {self.in_use} in use' + + def use(self): + self.in_use = True + + def free(self): + self.in_use = False + +class Volume_Handler: + """ Class to manage use of the volumes generated by a session.""" + def __init__(self, list_volumes): + self.volumes = list_volumes + def init_volumes(list_volumes): + list_volumes = [] + for volume in self.volumes: + list_volumes.append(Volume(volume)) + return list_volumes + self.volumes =init_volumes(list_volumes) + + def get_available_vol(self): + """ Check in the list of volumes one on use or not, also send a default + volume 0 as is the one with more tracks available.""" + for volume in self.volumes: + if not (volume.in_use): + volume.use() + return volume.name + print("Not more volumes in disposal return volume 000000") + return "000000" + + def free_vol(self, vol): + """ Check from the array the volume is already free for other test to use.""" + for volume in self.volumes: + if volume.name == vol: + volume.free() + + def init_volumes(self): + list_volumes = [] + for volume in self.volumes: + list_volumes.append(Volume(volume)) + self.volumes =list_volumes + + +def 
get_volumes(ansible_zos_module, path): + """Get an array of available volumes""" + # Using the command d u,dasd,online to fill an array of available volumes with the priority + # of of actives (A) and storage (STRG) first then online (O) and storage and if is needed, the + # private ones but actives then to get a flag if is available or not every volumes + # is a instance of a class to manage the use. + hosts = ansible_zos_module + list_volumes = [] + storage_online = [] + flag = False + iteration = 5 + prefer_vols = read_test_config(path) + # The first run of the command d u,dasd,online,,n in the system can conclude with empty data + # to ensure get volumes is why require not more 5 runs and lastly one second of wait. + while not flag and iteration > 0: + all_volumes = hosts.all.zos_operator(cmd="d u,dasd,online,,65536") + time.sleep(1) + if all_volumes is not None: + for volume in all_volumes.contacted.values(): + all_volumes = volume.get('content') + flag = True if len(all_volumes) > 5 else False + iteration -= 1 + # Check if the volume is of storage and is active on prefer but also online as a correct option + for info in all_volumes: + if "ACTIVATED" in info or "-D U," in info or "UNIT" in info: + continue + vol_w_info = info.split() + if vol_w_info[2] == 'O' and vol_w_info[4] == "STRG/RSDNT": + storage_online.append(vol_w_info[3]) + # Insert a volumes for the class ls_Volumes to give flag of in_use and correct manage + for vol in storage_online: + list_volumes.append(vol) + if prefer_vols is not None: + list(map(str, prefer_vols)) + prefer_vols.extend(list_volumes) + prefer_vols = list(filter(lambda item: item is not None, prefer_vols)) + return prefer_vols + else: + return list_volumes + + +def read_test_config(path): + p = path + with open(p, 'r') as file: + config = yaml.safe_load(file) + if "VOLUMES" in config.keys(): + if len(config["VOLUMES"]) > 0: + return config["VOLUMES"] + else: + return None \ No newline at end of file From 
4772f7862198b6648209b4ae0d42f324056e1de2 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 6 Feb 2024 12:53:21 -0800 Subject: [PATCH 296/413] Cherry picked 1.9.0 beta.1 into dev (#1207) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Merge Staging release v1.9.0 beta.1 into main (#1205) * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. 
* corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use 
typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. 
This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added appropriate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue selection field (#593) * Update git issue selection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ignored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV 
else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: 
ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to 
allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. 
* Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. 
* zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. 
* zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. 
* Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. 
* Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. 
* bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert 
method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar 
<ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure 
Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. 
* Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly 
always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. 
* Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: 
Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bugfix/619/mode set for files applied test case (#757) * Add test case for copy dest file * Add comments * Add test for folders * Adjust spaces * Changes for ensure consistency for all tests * Changes of name and clean creations --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Bugfix/381/failed when the job name was null or not found (#747) * Add the verbose for failed when job name was null or not found * Adjust message for what we can get * Whitespaces move * Add code from dev * Ecode utility as is in dev * Year for copyright * Case for having both the jod_id and job_name * Ecode utils functions not in my branch * Add final line ecode * Add fragment * Delete encode function two times, adjust 
job message and change the fragment * Change variable name for one more descriptive * Restore encode and change one word * Encode * bugfixes * Set up as dev * Better fragment --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Bugfix/660/zos operator reported failure caused by unrelated error response messages (#762) * Add options * Add transparency on the response and test cases * Solve spaces * Add validation to append * Fragment Added * Adjust fail_json on non_zero response * Identation mistakes solved * Solve last idenation problem * Replace prior tooling (makefile) that aidded the development workflow with a new 'ac' command. (#766) * Make file mount script helper Signed-off-by: ddimatos <dimatos@gmail.com> * Comments to mount script Signed-off-by: ddimatos <dimatos@gmail.com> * Staged updated scripts for makefile usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update mount scripts for use with makefile Signed-off-by: ddimatos <dimatos@gmail.com> * updates to correct mounts and add function to mounts-datasets Signed-off-by: ddimatos <dimatos@gmail.com> * adding completed new ac command files for development Signed-off-by: ddimatos <dimatos@gmail.com> * update ignore to more specific with venv Signed-off-by: ddimatos <dimatos@gmail.com> * Correcting ignore to allow for venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * moved logic that checks for info.env to venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Adding changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a path issue when calling venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes issue not being able to run all tests, fixes issue with content being written to collections folder Signed-off-by: ddimatos <dimatos@gmail.com> * Support zSH and update scp to fall back to legacy scp protocal Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage 
Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Fix incorrect message and remove the cd's before and after ac-test Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 347 new query fields (#778) * changing job.py to return 7 more fields, and for zos_job_query to pass them through * corrected testing to pull all new values through this assumes zoau 1.2.3 and z/OS at least 2.4 need to test older zoau to make sure this will still work * Added zoau version testing import to job.py so it won't reach for non-existent members. * pep8 and lint required changes * changed test to see if it will pass unit testing * Modified test_zos_data_set_func to skip HFS test if zOS > 02.04 * changed OS test for hfs usage * corrected usage of 'hosts'... removed the definition in prior edit. * changing OS version checker * corrected string extraction for OS version checker * added delete shell to 196/197 (finally of cat/uncat test) removed success message from 830 (version test logic) * removed the mvscmdauth call, as it coincides with some new test failures. * added changed=false back into testing of job_query * correction of zos->zoau name in comments. 
* Missing fragment in PR 778 New query fields (#780) * added fragment for pr 778 * Added changelog fragment query new fields Added changelog fragment query new fields * Update 778-query-new-fields.yml * Update docs with ansible/ansible-core version, AAP and fix the dated git issue templates (#771) * Doc vesion updates Signed-off-by: ddimatos <dimatos@gmail.com> * Repository template updates and future proofing Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Formatting corrections for release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Upate issue templates with newer version of software Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command supporting files (#789) * Update ac command supporting files Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> * Add recently changed module doc from prior commits Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> * Encode files recursively and test case for keep behavior. 
(#772) * Bring the jinja2 solution to dev and add test case * Add fragment * Solve problem z/OS 2.5 HFS * Declaration error solve * Need to check the validation with HFS * Ensure validating z/OS work with HFS * Change inecesary changes and fragments q * Return all test cases to normal * Return all test cases to normal * Create the local test case * Add local test case and change test case to be acurate * Get better cleanup of test-case * Update test_zos_data_set_func.py Equalize test mount func * Update ac to support a single test (#793) * Update ac to support a single test Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update test description Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Return the dynamically created destination attributes (#773) * First iteration to get dynamic values * Spaces and lines rectified * Add validation and extra variable to ensure consistency * Whitespaces * Change imports in test_zos_mount_func * Update test_zos_fetch_func imports * Update all imports for pipelines runs * Revert "Update all imports for pipelines runs" This reverts commit 1b370a2ba3c0001c316e0121ddab82ae7cc6d75d. Return one commit * Update data_set.py imports * Revert "Update data_set.py imports" This reverts commit 37561b0a12e04faaee8307a5541b71469dbe721d. 
* Update data_set imports * Update data_set imports * Update data_set imports * Restore import * Restore the imports * Add fragment * Solve a typo * Solve z/OS 2.5 HFS * Solve declaration error * Solve HFS and solution by now * Ensure HFS working with HFS * Better working on HFS testing problems * Change to cover many cases and add test * Modified changelog, corrected typos and shortemed file name * Delete 773-Return-the-dynamically-created-destintation-attributres.yaml * Update test_zos_data_set_func.py * Add documentation * Adjust spaces * Solve spaces in documentation * Solve problems on spaces in documentation * Adjust fragment and add validation for vsams * Better redaction to documentation * Solve spaces * Change documentation of code and collection * Change words in documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Updated ac command to clean up the collections directory Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> * Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> * added changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog based on PR feedback Signed-off-by: ddimatos <dimatos@gmail.com> * Increase ansible supported version to 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> * remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Change the line for the functional one (#805) * Add ansible-lint tooling added 
(#812) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> * add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * missing hyphen from command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command with doc corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 439 addf (#821) * initial changes to support F format * adding F option, added basic test looking for failure during ensure-present * added print_results to a failing uncatalog test. * adding more preint_result statements to track down cataloging issue * removed other print statements, added one back (cataloging is just plain finicky) * corrected volume name on new test * removed extra print statement from test code. Added Changelog fragment. * Expanded test case to try 1 of each record format creation. Added mention of 'F' into the documentation of record_format in dataset.py * Bugfix/769/mode option does not behave the same way that it does in the community module (#795) * First suggestion * Add files to be overwriten to the files to be changed * Add functionality to test case to ensure behaivour * Add test case for keep behaivour * Delete test repetition * Delete test case from other branch * Change test cases to ensure works as ansible module * Add fragment and change variable names for clarity * Get better test case and comments * Restore test --------- Co-authored-by: Demetri <dimatos@gmail.com> * bugfix/823/Return destination attributes had hardcoded type and record format (#824) * Add solution * Add fragment * Bufix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#806) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos 
<dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Changlog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> * remove verbosity from test Signed-off-by: ddimatos <dimatos@gmail.com> * correct indentation Signed-off-by: ddimatos <dimatos@gmail.com> * update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Stagging v1.6.0 merge into dev (#832) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos 
<dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstrin… * Remove changelog fragments not needed left in main Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml You are currently cherry-picking commit d20097b. 
Changes to be committed: deleted: changelogs/fragments/1016-remove-randint.yml deleted: changelogs/fragments/1036-apf-try-except.yml deleted: changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml deleted: changelogs/fragments/1042-missing-zoau-imports.yml deleted: changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml deleted: changelogs/fragments/1045-local-uss-unarchive.yml deleted: changelogs/fragments/1048-Update_sanity_tests_ignore.yml deleted: changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml deleted: changelogs/fragments/1049-xmit-temporary-data-sets.yml deleted: changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml deleted: changelogs/fragments/1052-try-except-pass-dd-statement.yml deleted: changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml deleted: changelogs/fragments/1055-remove-subprocess-encode.yml deleted: changelogs/fragments/1056-Update_sanity_ignore_2_16.yml deleted: changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml deleted: changelogs/fragments/1060-remote_tmp_zos_script.yml deleted: changelogs/fragments/1064-corruped-second-copy.yml deleted: changelogs/fragments/1065-rexx-exec-tso_command.yml deleted: changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml deleted: changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml deleted: changelogs/fragments/1074-improve-job-submit-error-msgs.yml deleted: changelogs/fragments/1077-modify-uss-extraction.yml deleted: changelogs/fragments/1089-update-managed_node_doc.yml deleted: changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml deleted: changelogs/fragments/1101-fix-undefined-var.yml deleted: changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml deleted: 
changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml deleted: changelogs/fragments/1176-copy-members.yml deleted: changelogs/fragments/1195-Add_prefer_volumes_user.yml deleted: changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml deleted: changelogs/fragments/977-remove-hard-coded-vols-and-datasets.yml deleted: changelogs/fragments/v1.9.0-beta.1_summary.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .ansible-lint | 8 ++ .gitignore | 2 +- CHANGELOG.rst | 44 ++++++++- README.md | 12 ++- ac | 6 +- changelogs/.plugin-cache.yaml | 7 +- changelogs/changelog.yaml | 99 ++++++++++++++++++- changelogs/fragments/1016-remove-randint.yml | 5 - changelogs/fragments/1036-apf-try-except.yml | 4 - ...bmit-job-honor-return-output-literally.yml | 4 - .../fragments/1042-missing-zoau-imports.yml | 10 -- ...-is-passing-wrong-value-to-zoauopercmd.yml | 8 -- .../fragments/1045-local-uss-unarchive.yml | 5 - .../1048-Update_sanity_tests_ignore.yml | 8 -- .../1048-update-ac-tool-pyyaml-version.yml | 8 -- .../1049-xmit-temporary-data-sets.yml | 4 - .../1051-try-except-pass-zos_mvs_raw.yml | 4 - .../1052-try-except-pass-dd-statement.yml | 4 - ..._test_collections_on_ansible_core_2_16.yml | 4 - .../1055-remove-subprocess-encode.yml 
| 4 - .../1056-Update_sanity_ignore_2_16.yml | 4 - ...ti_line_quoted_string_in_content_field.yml | 12 --- .../fragments/1060-remote_tmp_zos_script.yml | 5 - .../fragments/1064-corruped-second-copy.yml | 5 - .../fragments/1065-rexx-exec-tso_command.yml | 4 - ...nt_mvs_copy_destination_attrs_match_up.yml | 5 - ...emote_temporary_files_after_completion.yml | 4 - .../1074-improve-job-submit-error-msgs.yml | 3 - .../fragments/1077-modify-uss-extraction.yml | 3 - .../1089-update-managed_node_doc.yml | 3 - ...cumented_argument_and_import_exception.yml | 10 -- .../fragments/1101-fix-undefined-var.yml | 3 - ...s-fetch-find-remove-hardcoded-datasets.yml | 4 - ...s_to_use_new_alias_and_execute_options.yml | 3 - .../1200-zos_backup_restore-sanity-issues.yml | 4 - docs/source/conf.py | 2 +- docs/source/modules/zos_apf.rst | 4 + docs/source/modules/zos_archive.rst | 3 + docs/source/modules/zos_backup_restore.rst | 3 + docs/source/modules/zos_blockinfile.rst | 3 + docs/source/modules/zos_copy.rst | 16 +++ docs/source/modules/zos_data_set.rst | 4 + docs/source/modules/zos_encode.rst | 2 + docs/source/modules/zos_fetch.rst | 1 + docs/source/modules/zos_gather_facts.rst | 1 + docs/source/modules/zos_job_submit.rst | 5 + docs/source/modules/zos_lineinfile.rst | 4 + docs/source/modules/zos_mount.rst | 1 + docs/source/modules/zos_mvs_raw.rst | 8 ++ docs/source/modules/zos_operator.rst | 1 + .../modules/zos_operator_action_query.rst | 1 + docs/source/modules/zos_script.rst | 4 + docs/source/modules/zos_tso_command.rst | 1 + docs/source/modules/zos_unarchive.rst | 4 + docs/source/release_notes.rst | 86 +++++++++++++++- docs/templates/module.rst.j2 | 2 +- galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 4 +- plugins/action/zos_job_submit.py | 2 +- plugins/modules/zos_operator.py | 16 +-- plugins/modules/zos_operator_action_query.py | 16 +-- scripts/venv.sh | 2 +- tests/config.yml | 34 +++++++ .../modules/test_zos_tso_command_func.py | 2 +- 64 files changed, 366 insertions(+), 190 
deletions(-) delete mode 100644 changelogs/fragments/1016-remove-randint.yml delete mode 100644 changelogs/fragments/1036-apf-try-except.yml delete mode 100644 changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml delete mode 100644 changelogs/fragments/1042-missing-zoau-imports.yml delete mode 100644 changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml delete mode 100644 changelogs/fragments/1045-local-uss-unarchive.yml delete mode 100644 changelogs/fragments/1048-Update_sanity_tests_ignore.yml delete mode 100644 changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml delete mode 100644 changelogs/fragments/1049-xmit-temporary-data-sets.yml delete mode 100644 changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml delete mode 100644 changelogs/fragments/1052-try-except-pass-dd-statement.yml delete mode 100644 changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml delete mode 100644 changelogs/fragments/1055-remove-subprocess-encode.yml delete mode 100644 changelogs/fragments/1056-Update_sanity_ignore_2_16.yml delete mode 100644 changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml delete mode 100644 changelogs/fragments/1060-remote_tmp_zos_script.yml delete mode 100644 changelogs/fragments/1064-corruped-second-copy.yml delete mode 100644 changelogs/fragments/1065-rexx-exec-tso_command.yml delete mode 100644 changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml delete mode 100644 changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml delete mode 100644 changelogs/fragments/1074-improve-job-submit-error-msgs.yml delete mode 100644 changelogs/fragments/1077-modify-uss-extraction.yml delete mode 100644 changelogs/fragments/1089-update-managed_node_doc.yml delete mode 100644 
changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml delete mode 100644 changelogs/fragments/1101-fix-undefined-var.yml delete mode 100644 changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml delete mode 100644 changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml delete mode 100644 changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml create mode 100644 tests/config.yml diff --git a/.ansible-lint b/.ansible-lint index 7325803a2..821806e3a 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -1,6 +1,11 @@ +################################################################################ +# Copyright (c) IBM Corporation 2024 +################################################################################ +# For additonal doc, see https://ansible.readthedocs.io/projects/lint/configuring/ exclude_paths: - .tar.gz - __pycache__/ + - .ansible-lint - .cache/ - .DS_Store - .git/ @@ -35,3 +40,6 @@ parseable: true quiet: false use_default_rules: true verbosity: 1 +# Offline mode disables installation of requirements.yml and schema refreshing often +# found in project_root/collections/requirements.yml. +offline: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index 9c4301951..77064aff1 100644 --- a/.gitignore +++ b/.gitignore @@ -245,7 +245,6 @@ venv/ ENV/ env.bak/ venv.bak/ - ################################### # Ansible z/OS Core Development # ################################### @@ -256,6 +255,7 @@ venv.bak/ .pytest_cache info.env shell_exploits.txt +importer_result.json ################################################################################ # Debugging .ignore, if you want to know why a particular file is being ignored diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a5883246e..505a98474 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,42 @@ ibm.ibm_zos_core Release Notes .. 
contents:: Topics +v1.9.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2024-01-31' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). +- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). +- zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). +- zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. 
Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator. +- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + +Bugfixes +-------- + +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. 
Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). +- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + v1.8.0 ====== @@ -29,18 +65,18 @@ Minor Changes - zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) - zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). - zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. 
(https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) -- zos_script - add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). -- zos_submit_job - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). +- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). +- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) Deprecated Features ------------------- -- zos_blockinfile - debug is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). +- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). 
Bugfixes -------- diff --git a/README.md b/README.md index 947740ad5..da3b114d4 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ The **IBM z/OS core collection** is following the **Red Hat® Ansible Certified Content for IBM Z®** method of distributing content. Collections will be developed in the open, and when content is ready for use, it is released to -[Ansible Galaxy](https://galaxy.ansible.com/search?keywords=zos_&order_by=-relevance&deprecated=false&type=collection&page=1) +[Ansible Galaxy](https://galaxy.ansible.com/ui/) for community adoption. Once contributors review community usage, feedback, and are satisfied with the content published, the collection will then be released to [Ansible Automation Hub](https://www.ansible.com/products/automation-hub) @@ -62,9 +62,17 @@ For **Ansible Automation Platform** (AAP) users, review the and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) for more more information on supported versions of Ansible. +Other Dependencies +================== +This release of the **IBM z/OS core collection** requires the z/OS managed node have: +- [z/OS](https://www.ibm.com/docs/en/zos) V2R4 or later. +- [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). +- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. +- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) 1.2.5 (or later) but prior to version 1.3. + Copyright ========= -© Copyright IBM Corporation 2020-2023. +© Copyright IBM Corporation 2020-2024. 
License ======= diff --git a/ac b/ac index 1d06757f5..b5febedbb 100755 --- a/ac +++ b/ac @@ -70,7 +70,7 @@ MAG=$'\e[1;35m' CYN=$'\e[1;36m' ENDC=$'\e[0m' # 0 Docker is up, 1 docker is not up -DOCKER_INFO=`docker info> /dev/null 2>&1;echo $?` +DOCKER_INFO=`podman info> /dev/null 2>&1;echo $?` # ============================================================================== # Arg parsing helpers @@ -298,12 +298,12 @@ ac_sanity(){ if [ "${DOCKER_INFO}" == "0" ]; then if [ "${option_version}" ]; then message "Running ansible-test with docker container and python version ${option_version}." - . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ ${VENV_BIN}/ansible-test sanity --python ${option_version} --requirements --docker default && \ cd ${CURR_DIR}; else message "Running ansible-test with docker container and all python versions." - . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + . 
$VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ ${VENV_BIN}/ansible-test sanity --requirements --docker default && \ cd ${CURR_DIR}; fi diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 810d65965..899014cd9 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -106,6 +106,11 @@ plugins: name: zos_ping namespace: '' version_added: 1.1.0 + zos_script: + description: Run scripts in z/OS + name: zos_script + namespace: '' + version_added: 1.8.0 zos_tso_command: description: Execute TSO commands name: zos_tso_command @@ -126,4 +131,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.7.0 +version: 1.9.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 35eeaebb0..c05af6436 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1043,12 +1043,12 @@ releases: terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). minor_changes: - - zos_script - Add support for remote_tmp from the Ansible configuration to - setup where temporary files will be created, replacing the module option tmp_path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1068). - zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_script - Add support for remote_tmp from the Ansible configuration to + setup where temporary files will be created, replacing the module option tmp_path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1068). - zos_tso_command - Add example for executing explicitly a REXX script from a data set. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1072). release_summary: 'Release Date: ''2023-12-08'' @@ -1176,3 +1176,96 @@ releases: name: zos_script namespace: '' release_date: '2023-10-24' + 1.9.0-beta.1: + changes: + bugfixes: + - zos_copy - When copying an executable data set with aliases and destination + did not exist, destination data set was created with wrong attributes. Fix + now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). + - zos_job_output - When passing a job ID or name less than 8 characters long, + the module sent the full stack trace as the module's message. Change now allows + the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_query - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + - zos_job_query - When passing a job ID or name less than 8 characters long, + the module sent the full stack trace as the module's message. Change now allows + the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_operator - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). 
+ - zos_unarchive - Using a local file with a USS format option failed when sending + to remote because dest_data_set option had an empty dictionary. Fix now leaves + dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). + - zos_unarchive - When unarchiving USS files, the module left temporary files + on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + minor_changes: + - zos_apf - Improves exception handling if there is a failure parsing the command + response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). + - zos_copy - Improve zos_copy performance when copying multiple members from + one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). + - zos_job_output - When passing a job ID and owner the module take as mutually + exclusive. Change now allows the use of a job ID and owner at the same time. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). + - zos_job_submit - The module had undocumented parameter and uses as temporary + file when the location of the file is LOCAL. Change now uses the same name + as the src for the temporary file removing the addition of tmp_file to the + arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + - zos_job_submit - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). 
+ - zos_mvs_raw - when using the dd_input content option for instream-data, if + the content was not properly indented according to the program which is generally + a blank in columns 1 & 2, those columns would be truncated. Now, when setting + instream-data, the module will ensure that all lines contain a blank in columns + 1 and 2 and add blanks when not present while retaining a maximum length of + 80 columns for any line. This is true for all content types; string, list + of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). + - zos_mvs_raw - no examples were included with the module that demonstrated + using a YAML block indicator, this now includes examples using a YAML block + indicator. + - zos_tso_command - add example for executing explicitly a REXX script from + a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + release_summary: 'Release Date: ''2024-01-31'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1016-remove-randint.yml + - 1036-apf-try-except.yml + - 1042-missing-zoau-imports.yml + - 1045-local-uss-unarchive.yml + - 1048-Update_sanity_tests_ignore.yml + - 1048-update-ac-tool-pyyaml-version.yml + - 1051-try-except-pass-zos_mvs_raw.yml + - 1052-try-except-pass-dd-statement.yml + - 1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml + - 1055-remove-subprocess-encode.yml + - 1056-Update_sanity_ignore_2_16.yml + - 1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml + - 1064-corruped-second-copy.yml + - 1065-rexx-exec-tso_command.yml + - 1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml + - 1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml + - 1074-improve-job-submit-error-msgs.yml + - 1077-modify-uss-extraction.yml + - 1078-short_job_name_sends_back_a_value_error.yaml + - 1091-Update_undocumented_argument_and_import_exception.yml + - 1101-fix-undefined-var.yml + - 1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml + - 1163-Refactor_calls_to_use_new_alias_and_execute_options.yml + - 1176-copy-members.yml + - 1195-Add_prefer_volumes_user.yml + - 1200-zos_backup_restore-sanity-issues.yml + - 977-remove-hard-coded-vols-and-datasets.yml + - v1.9.0-beta.1_summary.yml + release_date: '2024-02-01' diff --git a/changelogs/fragments/1016-remove-randint.yml b/changelogs/fragments/1016-remove-randint.yml deleted file mode 100644 index baac7fff9..000000000 --- a/changelogs/fragments/1016-remove-randint.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - module_utils/data_set - Replace the use of random.randint to random.sample - to generate random member names, random.randint raised a warning while - scanning with bandit. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1016) \ No newline at end of file diff --git a/changelogs/fragments/1036-apf-try-except.yml b/changelogs/fragments/1036-apf-try-except.yml deleted file mode 100644 index 16e8ab6c7..000000000 --- a/changelogs/fragments/1036-apf-try-except.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_apf - Improves exception handling if there is a failure - parsing the command response when operation selected is list. - (https://github.com/ansible-collections/ibm_zos_core/pull/1036). diff --git a/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml b/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml deleted file mode 100644 index 726397d2d..000000000 --- a/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_submit_job - Previous code did not return output, but still requested job data from the target system. - This changes to honor return_output=false by not querying the job dd segments at all. - (https://github.com/ansible-collections/ibm_zos_core/pull/1058). \ No newline at end of file diff --git a/changelogs/fragments/1042-missing-zoau-imports.yml b/changelogs/fragments/1042-missing-zoau-imports.yml deleted file mode 100644 index a91f6de48..000000000 --- a/changelogs/fragments/1042-missing-zoau-imports.yml +++ /dev/null @@ -1,10 +0,0 @@ -bugfixes: - - zos_job_query - The module handling ZOAU import errors obscured the - original traceback when an import error ocurred. Fix now passes correctly - the context to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - - - zos_operator - The module handling ZOAU import errors obscured the - original traceback when an import error ocurred. Fix now passes correctly - the context to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1042). 
\ No newline at end of file diff --git a/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml b/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml deleted file mode 100644 index 06f9a264a..000000000 --- a/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml +++ /dev/null @@ -1,8 +0,0 @@ -bugfixes: - - zos_operator - The module was ignoring the wait time argument. - The module now passes the wait time argument to ZOAU. - (https://github.com/ansible-collections/ibm_zos_core/pull/1044). - - - zos_operator_action_query - The module was ignoring the wait time argument. - The module now passes the wait time argument to ZOAU. - (https://github.com/ansible-collections/ibm_zos_core/pull/1044). \ No newline at end of file diff --git a/changelogs/fragments/1045-local-uss-unarchive.yml b/changelogs/fragments/1045-local-uss-unarchive.yml deleted file mode 100644 index 84bc5508c..000000000 --- a/changelogs/fragments/1045-local-uss-unarchive.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_unarchive - Using a local file with a USS format option failed when sending to - remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set - as None when using a USS format option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1045). \ No newline at end of file diff --git a/changelogs/fragments/1048-Update_sanity_tests_ignore.yml b/changelogs/fragments/1048-Update_sanity_tests_ignore.yml deleted file mode 100644 index 5d2960d28..000000000 --- a/changelogs/fragments/1048-Update_sanity_tests_ignore.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - zos_copy - change data type of parameter src from path to str inside AnsibleModule util. - - zos_copy - deprecate add_file_common_args argument. - - zos_copy - add owner and group to parameters inside AnsibleModule util. 
- - zos_copy - remove copy_member of AnsibleModule util as parameter and add to code logic. - - zos_copy - remove doc-default-does-not-match-spec ignore to ignore 2.14. - - zos_copy - remove doc-type-does-not-match-spec ignore to ignore 2.14. - (https://github.com/ansible-collections/ibm_zos_core/pull/1048). diff --git a/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml b/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml deleted file mode 100644 index 309862cfb..000000000 --- a/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - ac - PyYaml version 5.4.1 was being installed and not having a wheel to go - with the python versions 11 and 12. This fixes the issue by freezing the - the version to 6.0.1. - - ac - would not discover python installations not in PATH. This change - extends the search path to include common python installation locations - not in path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1083). diff --git a/changelogs/fragments/1049-xmit-temporary-data-sets.yml b/changelogs/fragments/1049-xmit-temporary-data-sets.yml deleted file mode 100644 index 5ef0f2078..000000000 --- a/changelogs/fragments/1049-xmit-temporary-data-sets.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the - temporary data sets created. Fix now removes the temporary data sets. - (https://github.com/ansible-collections/ibm_zos_core/pull/1049). 
\ No newline at end of file diff --git a/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml b/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml deleted file mode 100644 index 59b33d02c..000000000 --- a/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_mvs_raw - Removed Try, Except, Pass from the code, try block is in place to ignore any errors, - pass statement was changed to a variable assignment. This does not change any behavior. - (https://github.com/ansible-collections/ibm_zos_core/pull/1051). \ No newline at end of file diff --git a/changelogs/fragments/1052-try-except-pass-dd-statement.yml b/changelogs/fragments/1052-try-except-pass-dd-statement.yml deleted file mode 100644 index 42315337c..000000000 --- a/changelogs/fragments/1052-try-except-pass-dd-statement.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_mvs_raw - Removed Try, Except, Pass from the code, instead catching DatasetDeleteError - and pass only in that case, any other exception will be raised. - (https://github.com/ansible-collections/ibm_zos_core/pull/1052). diff --git a/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml b/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml deleted file mode 100644 index ac3c24bb5..000000000 --- a/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_archive - add missing-gplv3-license ignore to ignore 2.16. - - zos_unarchive - add missing-gplv3-license ignore to ignore 2.16. - (https://github.com/ansible-collections/ibm_zos_core/pull/1053). 
diff --git a/changelogs/fragments/1055-remove-subprocess-encode.yml b/changelogs/fragments/1055-remove-subprocess-encode.yml deleted file mode 100644 index 7e458dc09..000000000 --- a/changelogs/fragments/1055-remove-subprocess-encode.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - encode_utils - Removed use of subprocess from system utils, since the only - use of it could be replaced for AnsibleModule runcommand method. - (https://github.com/ansible-collections/ibm_zos_core/pull/1055). \ No newline at end of file diff --git a/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml b/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml deleted file mode 100644 index a5b192519..000000000 --- a/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_copy - remove doc-default-does-not-match-spec 2.16 ignore file. - - zos_copy - remove doc-type-does-not-match-spec 2.16 ignore file. - (https://github.com/ansible-collections/ibm_zos_core/pull/1056). diff --git a/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml b/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml deleted file mode 100644 index 49a3a3516..000000000 --- a/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml +++ /dev/null @@ -1,12 +0,0 @@ -minor_changes: - - zos_mvs_raw - when using the dd_input content option for instream-data, if - the content was not properly indented according to the program which is - generally a blank in columns 1 & 2, those columns would be truncated. Now, - when setting instream-data, the module will ensure that all lines contain - a blank in columns 1 and 2 and add blanks when not present while retaining - a maximum length of 80 columns for any line. This is true for all content - types; string, list of strings and when using a YAML block indicator. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - - zos_mvs_raw - no examples were included with the module that demonstrated - using a YAML block indicator, this now includes examples using a YAML - block indicator. diff --git a/changelogs/fragments/1060-remote_tmp_zos_script.yml b/changelogs/fragments/1060-remote_tmp_zos_script.yml deleted file mode 100644 index 1185f3a1b..000000000 --- a/changelogs/fragments/1060-remote_tmp_zos_script.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_script - add support for remote_tmp from the Ansible - configuration to setup where temporary files will be created, - replacing the module option tmp_path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1060). \ No newline at end of file diff --git a/changelogs/fragments/1064-corruped-second-copy.yml b/changelogs/fragments/1064-corruped-second-copy.yml deleted file mode 100644 index 82a04426e..000000000 --- a/changelogs/fragments/1064-corruped-second-copy.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - When performing a copy operation to an existing file, the copied - file resulted in having corrupted contents. Fix now implements a workaround - to not use the specific copy routine that corrupts the file contents. - (https://github.com/ansible-collections/ibm_zos_core/pull/1064). diff --git a/changelogs/fragments/1065-rexx-exec-tso_command.yml b/changelogs/fragments/1065-rexx-exec-tso_command.yml deleted file mode 100644 index 5d20ccfd6..000000000 --- a/changelogs/fragments/1065-rexx-exec-tso_command.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_tso_command - add example for executing explicitly a REXX script from - a data set. - (https://github.com/ansible-collections/ibm_zos_core/pull/1065). 
diff --git a/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml b/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml deleted file mode 100644 index 05e1c9ce4..000000000 --- a/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - When copying an executable data set with aliases and destination did not exist, - destination data set was created with wrong attributes. Fix now creates destination data set - with the same attributes as the source. - (https://github.com/ansible-collections/ibm_zos_core/pull/1066). \ No newline at end of file diff --git a/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml b/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml deleted file mode 100644 index 6532e60ae..000000000 --- a/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. - Change now removes temporary files. - (https://github.com/ansible-collections/ibm_zos_core/pull/1073). \ No newline at end of file diff --git a/changelogs/fragments/1074-improve-job-submit-error-msgs.yml b/changelogs/fragments/1074-improve-job-submit-error-msgs.yml deleted file mode 100644 index 769131a2b..000000000 --- a/changelogs/fragments/1074-improve-job-submit-error-msgs.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_job_submit - Improve error messages in zos_job_submit to be clearer. - (https://github.com/ansible-collections/ibm_zos_core/pull/1074). 
diff --git a/changelogs/fragments/1077-modify-uss-extraction.yml b/changelogs/fragments/1077-modify-uss-extraction.yml deleted file mode 100644 index 0886dfab1..000000000 --- a/changelogs/fragments/1077-modify-uss-extraction.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_unarchive - Change the USS file extraction method from extractall to a custom function to extract filtered members. - (https://github.com/ansible-collections/ibm_zos_core/pull/1077). \ No newline at end of file diff --git a/changelogs/fragments/1089-update-managed_node_doc.yml b/changelogs/fragments/1089-update-managed_node_doc.yml deleted file mode 100644 index e0c7ff18b..000000000 --- a/changelogs/fragments/1089-update-managed_node_doc.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - managed node doc - updated the managed node documentation links and content. - (https://github.com/ansible-collections/ibm_zos_core/pull/1089). diff --git a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml deleted file mode 100644 index d1d1560f8..000000000 --- a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_job_submit - The module handling ZOAU import errors obscured the - original traceback when an import error ocurred. Fix now passes correctly - the context to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1091). - - - zos_job_submit - The module had undocumented parameter and uses as temporary file - when the location of the file is LOCAL. Change now uses the same name as the src - for the temporary file removing the addition of tmp_file to the arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1091). 
diff --git a/changelogs/fragments/1101-fix-undefined-var.yml b/changelogs/fragments/1101-fix-undefined-var.yml deleted file mode 100644 index 1d9eeba3c..000000000 --- a/changelogs/fragments/1101-fix-undefined-var.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. - (https://github.com/ansible-collections/ibm_zos_core/pull/1101). \ No newline at end of file diff --git a/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml b/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml deleted file mode 100644 index 7a470d57c..000000000 --- a/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_fetch - remove hardcoded datasets and dependencies from test cases. - - zos_find - remove hardcoded datasets and dependencies from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1158). \ No newline at end of file diff --git a/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml b/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml deleted file mode 100644 index 6cd512427..000000000 --- a/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Change call to ZOAU python API by using a dictionary to arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1163). \ No newline at end of file diff --git a/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml b/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml deleted file mode 100644 index 27d40f560..000000000 --- a/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_backup_restore - Added space type choices to argument spec to remove - validate-modules:doc-choices-do-not-match-spec. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1200). diff --git a/docs/source/conf.py b/docs/source/conf.py index 9c7a6994e..f8dd69685 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -16,7 +16,7 @@ ############################################################################## project = 'IBM z/OS core collection' -copyright = '2020, 2021, IBM' +copyright = '2024, IBM' author = 'IBM' # The full version, including alpha/beta/rc tags diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 195b34611..e9a55c007 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -54,6 +54,7 @@ force_dynamic | **required**: False | **type**: bool + | **default**: False volume @@ -78,6 +79,7 @@ sms | **required**: False | **type**: bool + | **default**: False operation @@ -144,6 +146,7 @@ persistent | **required**: False | **type**: bool + | **default**: False backup_name @@ -203,6 +206,7 @@ batch | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 03eaafbae..525c7c0be 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -99,6 +99,7 @@ format | **required**: False | **type**: bool + | **default**: False @@ -180,6 +181,7 @@ remove | **required**: False | **type**: bool + | **default**: False dest_data_set @@ -326,6 +328,7 @@ force | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 7c9a8a876..cc6c60d66 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -103,6 +103,7 @@ full_volume | **required**: False | **type**: bool + | **default**: False temp_volume @@ -130,6 +131,7 @@ recover | **required**: False | **type**: bool + | **default**: False overwrite @@ -139,6 +141,7 @@ overwrite | 
**required**: False | **type**: bool + | **default**: False sms_storage_class diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index e1e11486c..f3eef5967 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -124,6 +124,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -171,6 +172,7 @@ force | **required**: False | **type**: bool + | **default**: False indentation @@ -178,6 +180,7 @@ indentation | **required**: False | **type**: int + | **default**: 0 diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 004671ebc..86a3a9463 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -41,6 +41,7 @@ asa_text | **required**: False | **type**: bool + | **default**: False backup @@ -52,6 +53,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -155,6 +157,7 @@ force | **required**: False | **type**: bool + | **default**: False force_lock @@ -168,6 +171,7 @@ force_lock | **required**: False | **type**: bool + | **default**: False ignore_sftp_stderr @@ -177,6 +181,7 @@ ignore_sftp_stderr | **required**: False | **type**: bool + | **default**: False is_binary @@ -188,6 +193,7 @@ is_binary | **required**: False | **type**: bool + | **default**: False executable @@ -203,6 +209,7 @@ executable | **required**: False | **type**: bool + | **default**: False aliases @@ -214,6 +221,7 @@ aliases | **required**: False | **type**: bool + | **default**: False local_follow @@ -246,6 +254,7 @@ remote_src | **required**: False | **type**: bool + | **default**: False src @@ -261,6 +270,8 @@ src If ``src`` is a directory and ends with "/", the contents of it will be copied into the root of ``dest``. If it doesn't end with "/", the directory itself will be copied. 
+ If ``src`` is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. + If ``src`` is a VSAM data set, ``dest`` must also be a VSAM. Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. @@ -278,6 +289,7 @@ validate | **required**: False | **type**: bool + | **default**: False volume @@ -446,6 +458,7 @@ use_template | **required**: False | **type**: bool + | **default**: False template_parameters @@ -526,6 +539,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False trim_blocks @@ -543,6 +557,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False newline_sequence @@ -559,6 +574,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index ddcc97a8b..70e798a08 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -266,6 +266,7 @@ replace | **required**: False | **type**: bool + | **default**: False tmp_hlq @@ -288,6 +289,7 @@ force | **required**: False | **type**: bool + | **default**: False batch @@ -537,6 +539,7 @@ batch | **required**: False | **type**: bool + | **default**: False force @@ -550,6 +553,7 @@ batch | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 089208c5f..4c2294e24 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -86,6 +86,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -110,6 +111,7 @@ backup_compress | **required**: False | **type**: bool + | **default**: False tmp_hlq diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 66792fa87..21b573a2a 100644 --- a/docs/source/modules/zos_fetch.rst +++ 
b/docs/source/modules/zos_fetch.rst @@ -130,6 +130,7 @@ ignore_sftp_stderr | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 63bd22701..0247ffd96 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -52,6 +52,7 @@ filter | **required**: False | **type**: list | **elements**: str + | **default**: [] diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index e0fd8e2d1..6cff37a6a 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -66,6 +66,7 @@ wait | **required**: False | **type**: bool + | **default**: False wait_time_s @@ -151,6 +152,7 @@ use_template | **required**: False | **type**: bool + | **default**: False template_parameters @@ -231,6 +233,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False trim_blocks @@ -248,6 +251,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False newline_sequence @@ -264,6 +268,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index e352007df..f7005017e 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -83,6 +83,7 @@ backrefs | **required**: False | **type**: bool + | **default**: False insertafter @@ -140,6 +141,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -173,6 +175,7 @@ firstmatch | **required**: False | **type**: bool + | **default**: False encoding @@ -194,6 +197,7 @@ force | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 9ff7ba106..42e8a8ea6 100644 --- a/docs/source/modules/zos_mount.rst +++ 
b/docs/source/modules/zos_mount.rst @@ -113,6 +113,7 @@ persistent | **required**: False | **type**: bool + | **default**: False backup_name diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index fb6a1a726..3ebedadd5 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -51,6 +51,7 @@ auth | **required**: False | **type**: bool + | **default**: False verbose @@ -60,6 +61,7 @@ verbose | **required**: False | **type**: bool + | **default**: False dds @@ -156,6 +158,7 @@ dds | **required**: False | **type**: bool + | **default**: False replace @@ -173,6 +176,7 @@ dds | **required**: False | **type**: bool + | **default**: False backup @@ -182,6 +186,7 @@ dds | **required**: False | **type**: bool + | **default**: False space_type @@ -888,6 +893,7 @@ dds | **required**: False | **type**: bool + | **default**: False replace @@ -905,6 +911,7 @@ dds | **required**: False | **type**: bool + | **default**: False backup @@ -914,6 +921,7 @@ dds | **required**: False | **type**: bool + | **default**: False space_type diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index e0f65414f..9ad26d64c 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -42,6 +42,7 @@ verbose | **required**: False | **type**: bool + | **default**: False wait_time_s diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index acb06be50..b2e99d399 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -90,6 +90,7 @@ message_filter | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index bc8dff3c0..f51096361 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -113,6 +113,7 @@ use_template | 
**required**: False | **type**: bool + | **default**: False template_parameters @@ -193,6 +194,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False trim_blocks @@ -210,6 +212,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False newline_sequence @@ -226,6 +229,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index 846cb93d8..816a859e7 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -44,6 +44,7 @@ max_rc | **required**: False | **type**: int + | **default**: 0 diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index ae3b92516..da80bd31a 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -83,6 +83,7 @@ format | **required**: False | **type**: bool + | **default**: False dest_volumes @@ -172,6 +173,7 @@ list | **required**: False | **type**: bool + | **default**: False dest_data_set @@ -336,6 +338,7 @@ force | **required**: False | **type**: bool + | **default**: False remote_src @@ -345,6 +348,7 @@ remote_src | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index b198d74de..6770aa879 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,78 @@ Releases ======== +Version 1.9.0-beta.1 +==================== + +Minor Changes +------------- +- ``zos_apf`` - Improved exception handling when the module is unable to process a response originating as a batch update. +- ``zos_copy`` - Improved performance when copying multiple members from one PDS/E to another PDS/E. 
+- ``zos_job_output`` - Has been enhanced to allow for both a job ID and owner to be selected when obtaining job output, removing the prior mutual exclusivity. +- ``zos_operator`` - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. +- ``zos_job_query`` - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. +- ``zos_job_submit`` + + - Improved messages in the action plugin. + - Improved the action plugin performance, flow and use of undocumented variables. + - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. +- ``zos_tso_command`` - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. +- ``zos_mvs_raw`` + + - Has been enhanced to ensure that **instream-data** for option **dd_input** contain blanks in columns 1 and 2 while retaining a maximum length + of 80 columns for strings and a list of strings. This is generally the requirement for most z/OS programs. + - Has been updated with new examples demonstrating a YAML block indicator, often helpful when wanting to control the + **instream-data** formatting. + + +Bugfixes +-------- + +- ``zos_copy`` + + - Fixed an issue when copying an aliased executable from a data set to a non-existent data set, the destination data sets primary + and secondary extents would not match the source data set extent sizes. + - Fixed an issue when performing a copy operation to an existing file, the copied file resulted in having corrupted contents. + +- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix + supports shorter job IDs as well as the use of wildcards. + +- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix + supports shorter job IDs as well as the use of wildcards. 
+ +- ``zos_unarchive`` + + - Fixed an issue when using a local file with the USS format option that would fail sending it to the managed node. + - Fixed an issue that occurred when unarchiving USS files that would leave temporary files behind on the managed node. + +Known Issues +------------ + +Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. + +This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, +``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is +unique, some options to work around the error are below. + +- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. +- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. +- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with + a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS®`_ V2R4 or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. + Version 1.8.0 ============= @@ -66,8 +138,16 @@ Bugfixes Known Issues ------------ -- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. 
These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. -- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. +Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. + +This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, +``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is +unique, some options to work around the error are below. + +- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. +- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. 
+- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with + a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. Availability ------------ @@ -908,6 +988,8 @@ Known issues https://www.ibm.com/docs/en/zoau/1.2.x .. _Z Open Automation Utilities 1.2.4: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.2.5: + https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS®: diff --git a/docs/templates/module.rst.j2 b/docs/templates/module.rst.j2 index 7f31b536d..ec1c9bc1a 100644 --- a/docs/templates/module.rst.j2 +++ b/docs/templates/module.rst.j2 @@ -62,7 +62,7 @@ Synopsis {% if spec.elements %} {{ " " * level }}| **elements**: {{ spec.elements }} {% endif %} -{% if spec.default %} +{% if 'default' in spec and spec.default is not none and spec.default != ''%} {{ " " * level }}| **default**: {{ spec.default }} {% endif %} {% if spec.choices %} diff --git a/galaxy.yml b/galaxy.yml index f7be530c7..93af5d038 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.8.0 +version: 1.9.0-beta.1 # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index e1ee28246..abab47f9c 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.8.0" +version: "1.9.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.2.4" + - "1.2.5" diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index fa65f7318..4b0245b37 100644 --- a/plugins/action/zos_job_submit.py +++ 
b/plugins/action/zos_job_submit.py @@ -103,7 +103,7 @@ def run(self, tmp=None, task_vars=None): # if self._connection._shell.path_has_trailing_slash(dest): # dest_file = self._connection._shell.join_path(dest, source_rel) # else: - dest_file = self._connection._shell.join_path(dest_path) + self._connection._shell.join_path(dest_path) tmp_src = self._connection._shell.join_path(tmp, "source") diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 6281c5cd6..012a46c0c 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -161,16 +161,15 @@ BetterArgParser, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) + try: from zoautil_py import opercmd except Exception: opercmd = ZOAUImportError(traceback.format_exc()) -try: - from zoautil_py import ZOAU_API_VERSION -except Exception: - ZOAU_API_VERSION = "1.2.0" - def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): @@ -284,13 +283,8 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") - zv = ZOAU_API_VERSION.split(".") use_wait_arg = False - if zv[0] > "1": - use_wait_arg = True - elif zv[0] == "1" and zv[1] > "2": - use_wait_arg = True - elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": + if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): use_wait_arg = True if use_wait_arg: diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index a035cad33..022708692 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -226,16 +226,15 @@ MissingZOAUImport, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) + try: from zoautil_py import opercmd except Exception: opercmd = MissingZOAUImport() -try: - from zoautil_py import ZOAU_API_VERSION -except Exception: - ZOAU_API_VERSION = "1.2.0" - def 
run_module(): module_args = dict( @@ -262,13 +261,8 @@ def run_module(): wait_s = 5 - zv = ZOAU_API_VERSION.split(".") use_wait_arg = False - if zv[0] > "1": - use_wait_arg = True - elif zv[0] == "1" and zv[1] > "2": - use_wait_arg = True - elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": + if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): use_wait_arg = True if use_wait_arg: diff --git a/scripts/venv.sh b/scripts/venv.sh index 315e7a854..51426a055 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -297,7 +297,7 @@ find_in_path() { discover_python(){ # Don't use which, it only will find first in path within script # for python_found in `which python3 | cut -d" " -f3`; do - pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") + pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11") # "python3.12" "python3.13" "python3.14") #pys=("python3.8" "python3.9") for py in "${pys[@]}"; do for python_found in `find_in_path $py`; do diff --git a/tests/config.yml b/tests/config.yml new file mode 100644 index 000000000..ce73e796d --- /dev/null +++ b/tests/config.yml @@ -0,0 +1,34 @@ +################################################################################ +# Copyright (c) IBM Corporation 2024 +################################################################################ + +# ############################################################################# +# Description +# Support for this feature was first added in ansible-core 2.12 so that +# ansible-test configured with desirable changes. This is an optional +# configuration, but when used, must be placed in "tests/config.yml" +# relative to the base of the collection. This configuration only +# applies to modules and module_utils. 
+# +# See additional example - +# https://github.com/ansible/ansible/blob/devel/test/lib/ansible_test/config/config.yml +# +# Options +# modules - required +# python_requires - required +# - 'default' - All Python versions supported by Ansible. +# This is the default value if no configuration is provided. +# - 'controller' - All Python versions supported by the Ansible controller. +# This indicates the modules/module_utils can only run on the controller. +# Intended for use only with modules/module_utils that depend on +# ansible-connection, which only runs on the controller. +# Unit tests for modules/module_utils will be permitted to import any +# Ansible code, instead of only module_utils. +# - SpecifierSet - A PEP 440 specifier set indicating the supported Python versions. +# This is only needed when modules/module_utils do not support all +# Python versions supported by Ansible. It is not necessary to exclude +# versions which Ansible does not support, as this will be done automatically. +# ############################################################################# + +modules: + python_requires: '>=3.9' diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 9860e6d12..6f76ceb3f 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at From 34b21237567a5014c97aa927f6093a8af335b90d Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 9 Feb 2024 09:44:27 -0700 Subject: [PATCH 297/413] [v1.10.0] [zos_job_submit] Migrate zos_job_submit to ZOAU v1.3.0 (#1209) * Change jobs calls * Update exception handling * Remove deprecated wait option * Remove unneeded TODO * Update fetch calls to include the program name * Remove compatibility with previous versions of ZOAU * Disable tests that depend on zos_copy * Remove `wait` option from tests * Add note to test * Fix sanity issues * Add changelog fragment --- .../1209-zoau-migration-zos_job_submit.yml | 3 + plugins/module_utils/job.py | 51 +- plugins/modules/zos_job_submit.py | 105 ++--- .../modules/test_zos_job_submit_func.py | 439 ++++++++++-------- 4 files changed, 313 insertions(+), 285 deletions(-) create mode 100644 changelogs/fragments/1209-zoau-migration-zos_job_submit.yml diff --git a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml new file mode 100644 index 000000000..6f58e2713 --- /dev/null +++ b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Migrated the module to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1209). 
\ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 3d7d80d68..a854a0a1e 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -22,7 +22,6 @@ BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - # MissingZOAUImport, ZOAUImportError ) @@ -35,15 +34,8 @@ # from zoautil_py.jobs import read_output, list_dds, listing from zoautil_py import jobs except Exception: - # read_output = MissingZOAUImport() - # list_dds = MissingZOAUImport() - # listing = MissingZOAUImport() jobs = ZOAUImportError(traceback.format_exc()) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - zoau_version_checker -) - def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -248,19 +240,18 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] # creationdatetime=job[9] queueposition=job[10] - # starting in zoau 1.2.4, program_name[11] was added. - + # starting in zoau 1.2.4, program_name[11] was added. In 1.3.0, include_extended + # has to be set to true so we get the program name for a job. 
# Testing has shown that the program_name impact is minor, so we're removing that option - # This will also help maintain compatibility with 1.2.3 final_entries = [] - entries = jobs.fetch_multiple(job_id=job_id_temp) + entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = jobs.fetch_multiple(job_id=job_id_temp) + entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) if entries: for entry in entries: @@ -281,7 +272,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["system"] = "" job["owner"] = entry.owner - job["ret_code"] = {} + job["ret_code"] = dict() job["ret_code"]["msg"] = "{0} {1}".format(entry.status, entry.return_code) job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None @@ -290,26 +281,16 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["ret_code"]["code"] = int(entry.return_code) job["ret_code"]["msg_text"] = entry.status - # this section only works on zoau 1.2.3/+ vvv - # Beginning in ZOAU v1.3.0, the Job class changes svc_class to # service_class. 
- if zoau_version_checker.is_zoau_version_higher_than("1.2.5"): - job["service_class"] = entry.service_class - elif zoau_version_checker.is_zoau_version_higher_than("1.2.2"): - job["svc_class"] = entry.svc_class - if zoau_version_checker.is_zoau_version_higher_than("1.2.2"): - job["job_class"] = entry.job_class - job["priority"] = entry.priority - job["asid"] = entry.asid - job["creation_date"] = str(entry.creation_datetime)[0:10] - job["creation_time"] = str(entry.creation_datetime)[12:] - job["queue_position"] = entry.queue_position - if zoau_version_checker.is_zoau_version_higher_than("1.2.3"): - job["program_name"] = entry.program_name - - # this section only works on zoau 1.2.3/+ ^^^ - + job["svc_class"] = entry.service_class + job["job_class"] = entry.job_class + job["priority"] = entry.priority + job["asid"] = entry.asid + job["creation_date"] = str(entry.creation_datetime)[0:10] + job["creation_time"] = str(entry.creation_datetime)[12:] + job["queue_position"] = entry.queue_position + job["program_name"] = entry.program_name job["class"] = "" job["content_type"] = "" job["ret_code"]["steps"] = [] @@ -329,6 +310,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T for single_dd in list_of_dds: dd = {} + if "dataset" not in single_dd: + continue + # If dd_name not None, only that specific dd_name should be returned if dd_name is not None: if dd_name not in single_dd["dataset"]: @@ -336,9 +320,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T else: dd["ddname"] = single_dd["dataset"] - if "dataset" not in single_dd: - continue - if "recnum" in single_dd: dd["record_count"] = single_dd["recnum"] else: diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index aa0026069..1fd5030b5 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 
2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -54,17 +54,6 @@ - DATA_SET can be a PDS, PDSE, or sequential data set. - USS means the JCL location is located in UNIX System Services (USS). - LOCAL means locally to the ansible control node. - wait: - required: false - default: false - type: bool - description: - - Setting this option will yield no change, it is deprecated. There is no - no need to set I(wait); setting I(wait_times_s) is the correct way to - configure the amount of tme to wait for a job to execute. - - Configuring wait used by the L(zos_job_submit,./zos_job_submit.html) module has been - deprecated and will be removed in ibm.ibm_zos_core collection. - - See option I(wait_time_s). wait_time_s: required: false default: 10 @@ -620,6 +609,7 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text from timeit import default_timer as timer from os import remove import traceback @@ -627,9 +617,9 @@ import re try: - from zoautil_py import exceptions + from zoautil_py import exceptions as zoau_exceptions except ImportError: - exceptions = ZOAUImportError(traceback.format_exc()) + zoau_exceptions = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import jobs @@ -642,7 +632,7 @@ MAX_WAIT_TIME_S = 86400 -def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, start_time=timer()): +def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=None, start_time=timer()): """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. 
Arguments: @@ -652,7 +642,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, src_name (str) - the src name that was provided in the module because through the runtime src could be replace with a temporary file name timeout (int) - how long to wait in seconds for a job to complete - hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all + is_unix (bool) - True if JCL is a file in USS, otherwise False; Note that all JCL local to a controller is transfered to USS thus would be True volume (str) - volume the data set JCL is located on that will be cataloged before @@ -666,11 +656,11 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, """ kwargs = { - "timeout": timeout, - "hfs": hfs, + # Since every fetch retry waits for a second before continuing, + # we can just pass the timeout (also in seconds) to this arg. + "fetch_max_retries": timeout, } - wait = True # Wait is always true because the module requires wait_time_s > 0 present = False duration = 0 job_submitted = None @@ -691,9 +681,9 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, "not be cataloged on the volume {1}.".format(src, volume)) module.fail_json(**result) - job_submitted = jobs.submit(src, wait, None, **kwargs) + job_submitted = jobs.submit(src, is_unix=is_unix, **kwargs) - # Introducing a sleep to ensure we have the result of job sumbit carrying the job id + # Introducing a sleep to ensure we have the result of job submit carrying the job id. while (job_submitted is None and duration <= timeout): current_time = timer() duration = round(current_time - start_time) @@ -704,68 +694,69 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, # that is sent back as `AC` when the job is not complete but the problem # with monitoring 'AC' is that STARTED tasks never exit the AC status. 
if job_submitted: - job_listing_rc = jobs.listing(job_submitted.id)[0].rc - job_listing_status = jobs.listing(job_submitted.id)[0].status + job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code + job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status # Before moving forward lets ensure our job has completed but if we see # status that matches one in JOB_ERROR_MESSAGES, don't wait, let the code - # drop through and get analyzed in the main as it will scan the job ouput - # Any match to JOB_ERROR_MESSAGES ends our processing and wait times - while (job_listing_status not in JOB_ERROR_MESSAGES and - job_listing_status == 'AC' and - ((job_listing_rc is None or len(job_listing_rc) == 0 or - job_listing_rc == '?') and duration < timeout)): + # drop through and get analyzed in the main as it will scan the job ouput. + # Any match to JOB_ERROR_MESSAGES ends our processing and wait times. + while (job_fetch_status not in JOB_ERROR_MESSAGES and + job_fetch_status == 'AC' and + ((job_fetch_rc is None or len(job_fetch_rc) == 0 or + job_fetch_rc == '?') and duration < timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - job_listing_rc = jobs.listing(job_submitted.id)[0].rc - job_listing_status = jobs.listing(job_submitted.id)[0].status + job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code + job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status - # ZOAU throws a ZOAUException when the job sumbission fails thus there is no + # ZOAU throws a JobSubmitException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed # in the job_output parser. 
- except exceptions.ZOAUException as err: + except zoau_exceptions.JobSubmitException as err: result["changed"] = False result["failed"] = True - result["stderr"] = str(err) + result["stderr"] = to_text(err) result["duration"] = duration - result["job_id"] = job_submitted.id if job_submitted else None + result["job_id"] = job_submitted.job_id if job_submitted else None result["msg"] = ("Unable to submit job {0}, the job submission has failed. " "Without the job id, the error can not be determined. " "Consider using module `zos_job_query` to poll for the " "job by name or review the system log for purged jobs " - "resulting from an abend.".format(src_name)) + "resulting from an abend. Standard error may have " + "additional information.".format(src_name)) module.fail_json(**result) - # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id - # has been returned within the allocated time. - except exceptions.JobSubmitException as err: + # ZOAU throws a JobFetchException when it is unable to fetch a job. + # This could happen while trying to fetch a job still running. + except zoau_exceptions.JobFetchException as err: result["changed"] = False result["failed"] = False - result["stderr"] = str(err) + result["stderr"] = to_text(err) result["duration"] = duration - result["job_id"] = job_submitted.id if job_submitted else None - result["msg"] = ("The JCL has been submitted {0} and no job id was returned " - "within the allocated time of {1} seconds. Consider using " - " module zos_job_query to poll for a long running " - "jobs or increasing the value for " - "`wait_times_s`.".format(src_name, str(timeout))) + result["job_id"] = job_submitted.job_id + result["msg"] = ("The JCL has been submitted {0} with ID {1} but there was an " + "error while fetching its status within the allocated time of {2} " + "seconds. Consider using module zos_job_query to poll for the " + "job for more information. 
Standard error may have additional " + "information.".format(src_name, job_submitted.job_id, str(timeout))) module.fail_json(**result) - # Between getting a job_submitted and the jobs.listing(job_submitted.id)[0].rc + # Between getting a job_submitted and the jobs.fetch_multiple(job_submitted.job_id)[0].return_code # is enough time for the system to purge an invalid job, so catch it and let # it fall through to the catchall. except IndexError: job_submitted = None # There appears to be a small fraction of time when ZOAU has a handle on the - # job and and suddenly its purged, this check is to ensure the job is there + # job and suddenly its purged, this check is to ensure the job is there # long after the purge else we throw an error here if its been purged. if job_submitted is None: result["changed"] = False result["failed"] = True result["duration"] = duration - result["job_id"] = job_submitted.id if job_submitted else None + result["job_id"] = job_submitted.job_id if job_submitted else None result["msg"] = ("The job {0} has been submitted and no job id was returned " "within the allocated time of {1} seconds. 
Without the " "job id, the error can not be determined, consider using " @@ -774,15 +765,12 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, "abend.".format(src_name, str(timeout))) module.fail_json(**result) - return job_submitted.id if job_submitted else None, duration + return job_submitted.job_id if job_submitted else None, duration def run_module(): module_args = dict( src=dict(type="str", required=True), - wait=dict(type="bool", required=False, default=False, - removed_at_date='2022-11-30', - removed_from_collection='ibm.ibm_zos_core'), location=dict( type="str", default="DATA_SET", @@ -853,8 +841,6 @@ def run_module(): arg_defs = dict( src=dict(arg_type="data_set_or_path", required=True), - wait=dict(arg_type="bool", required=False, removed_at_date='2022-11-30', - removed_from_collection='ibm.ibm_zos_core'), location=dict( arg_type="str", default="DATA_SET", @@ -885,7 +871,6 @@ def run_module(): # Extract values from set module options location = parsed_args.get("location") volume = parsed_args.get("volume") - parsed_args.get("wait") src = parsed_args.get("src") return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") @@ -906,13 +891,13 @@ def run_module(): start_time = timer() if location == "DATA_SET": job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) + module, src, src_name=src, timeout=wait_time_s, is_unix=False, volume=volume, start_time=start_time) elif location == "USS": job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, hfs=True) + module, src, src_name=src, timeout=wait_time_s, is_unix=True) elif location == "LOCAL": job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, hfs=True) + module, src, src_name=src, timeout=wait_time_s, is_unix=True) try: # Explictly pass None for the unused args else a default 
of '*' will be @@ -997,7 +982,7 @@ def run_module(): result["msg"] = ("The JCL submitted with job id {0} but " "there was an error, please review " "the error for further details: {1}".format - (str(job_submitted_id), str(err))) + (str(job_submitted_id), to_text(err))) module.exit_json(**result) finally: diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 0fe6a59b9..cdd7175d2 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -277,7 +277,7 @@ def test_job_submit_PDS(ansible_zos_module): cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET" ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -306,7 +306,6 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): results = hosts.all.zos_job_submit( src="{0}(SAMPLE)".format(DATA_SET_NAME_SPECIAL_CHARS), location="DATA_SET", - wait=True, ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -325,7 +324,7 @@ def test_job_submit_USS(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None ) for result in 
results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -334,44 +333,64 @@ def test_job_submit_USS(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_job_submit_LOCAL(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -def test_job_submit_LOCAL(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) - - for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("changed") is True +# for result in results.contacted.values(): +# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" +# assert result.get("jobs")[0].get("ret_code").get("code") == 0 +# assert result.get("changed") is True -def test_job_submit_LOCAL_extraR(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_BACKSLASH_R) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. 
Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_job_submit_LOCAL_extraR(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_BACKSLASH_R) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) - for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("changed") is True +# for result in results.contacted.values(): +# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" +# assert result.get("jobs")[0].get("ret_code").get("code") == 0 +# assert result.get("changed") is True -def test_job_submit_LOCAL_BADJCL(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_BAD) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_job_submit_LOCAL_BADJCL(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_BAD) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." 
- assert result.get("changed") is False - assert re.search(r'completion code', repr(result.get("msg"))) +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'completion code', repr(result.get("msg"))) def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): @@ -508,123 +527,137 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): hosts.all.zos_data_set(name=data_set_name, state="absent") -@pytest.mark.parametrize("args", [ - dict(max_rc=None, wait_time_s=10), - dict(max_rc=4, wait_time_s=10), - dict(max_rc=12, wait_time_s=20) -]) -def test_job_submit_max_rc(ansible_zos_module, args): - """This""" - try: - hosts = ansible_zos_module - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_RC_8) - - results = hosts.all.zos_job_submit( - src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] - ) - - for result in results.contacted.values(): - # Should fail normally as a non-zero RC will result in job submit failure - if args["max_rc"] is None: - assert result.get("msg") is not None - assert result.get('changed') is False - # On busy systems, it is possible that the duration even for a job with a non-zero return code - # will take considerable time to obtain the job log and thus you could see either error msg below - #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" - # - Consider using module zos_job_query to poll for a long running job or - # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration'): - duration = result.get('duration') - else: - duration = 0 - - if duration >= args["wait_time_s"]: - re.search(r'long running job', repr(result.get("msg"))) - else: - assert re.search(r'non-zero', repr(result.get("msg"))) - - # Should 
fail with normally as well, job fails with an RC 8 yet max is set to 4 - elif args["max_rc"] == 4: - assert result.get("msg") is not None - assert result.get('changed') is False - # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater - # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise - # this job submission has failed. - assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) - - elif args["max_rc"] == 12: - # Will not fail but changed will be false for the non-zero RC, there - # are other possibilities like an ABEND or JCL ERROR will fail this even - # with a MAX RC - assert result.get("msg") is None - assert result.get('changed') is False - assert result.get("jobs")[0].get("ret_code").get("code") < 12 - finally: - hosts.all.file(path=tmp_file.name, state="absent") - - -@pytest.mark.template -@pytest.mark.parametrize("args", [ - dict( - template="Default", - options=dict( - keep_trailing_newline=False - ) - ), - dict( - template="Custom", - options=dict( - keep_trailing_newline=False, - variable_start_string="((", - variable_end_string="))", - comment_start_string="(#", - comment_end_string="#)" - ) - ), - dict( - template="Loop", - options=dict( - keep_trailing_newline=False - ) - ) -]) -def test_job_submit_jinja_template(ansible_zos_module, args): - try: - hosts = ansible_zos_module - - tmp_file = tempfile.NamedTemporaryFile(delete=False) - with open(tmp_file.name, "w") as f: - f.write(JCL_TEMPLATES[args["template"]]) - - template_vars = dict( - pgm_name="HELLO", - input_dataset="DUMMY", - message="Hello, world", - steps=[ - dict(step_name="IN", dd="DUMMY"), - dict(step_name="PRINT", dd="SYSOUT=*"), - dict(step_name="UT1", dd="*") - ] - ) - for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): - host.vars.update(template_vars) - - results = hosts.all.zos_job_submit( - src=tmp_file.name, - location="LOCAL", - 
use_template=True, - template_parameters=args["options"] - ) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# @pytest.mark.parametrize("args", [ +# dict(max_rc=None, wait_time_s=10), +# dict(max_rc=4, wait_time_s=10), +# dict(max_rc=12, wait_time_s=20) +# ]) +# def test_job_submit_max_rc(ansible_zos_module, args): +# """This""" +# try: +# hosts = ansible_zos_module +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_RC_8) + +# results = hosts.all.zos_job_submit( +# src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] +# ) + +# for result in results.contacted.values(): +# # Should fail normally as a non-zero RC will result in job submit failure +# if args["max_rc"] is None: +# assert result.get("msg") is not None +# assert result.get('changed') is False +# # On busy systems, it is possible that the duration even for a job with a non-zero return code +# # will take considerable time to obtain the job log and thus you could see either error msg below +# #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" +# # - Consider using module zos_job_query to poll for a long running job or +# # increase option \\'wait_times_s` to a value greater than 10.", +# if result.get('duration'): +# duration = result.get('duration') +# else: +# duration = 0 + +# if duration >= args["wait_time_s"]: +# re.search(r'long running job', repr(result.get("msg"))) +# else: +# assert re.search(r'non-zero', repr(result.get("msg"))) + +# # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 +# elif args["max_rc"] == 4: +# assert result.get("msg") is not None +# assert result.get('changed') is 
False +# # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater +# # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise +# # this job submission has failed. +# assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) + +# elif args["max_rc"] == 12: +# # Will not fail but changed will be false for the non-zero RC, there +# # are other possibilities like an ABEND or JCL ERROR will fail this even +# # with a MAX RC +# assert result.get("msg") is None +# assert result.get('changed') is False +# assert result.get("jobs")[0].get("ret_code").get("code") < 12 +# finally: +# hosts.all.file(path=tmp_file.name, state="absent") - for result in results.contacted.values(): - assert result.get('changed') is True - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - finally: - os.remove(tmp_file.name) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. 
+""" +# @pytest.mark.template +# @pytest.mark.parametrize("args", [ +# dict( +# template="Default", +# options=dict( +# keep_trailing_newline=False +# ) +# ), +# dict( +# template="Custom", +# options=dict( +# keep_trailing_newline=False, +# variable_start_string="((", +# variable_end_string="))", +# comment_start_string="(#", +# comment_end_string="#)" +# ) +# ), +# dict( +# template="Loop", +# options=dict( +# keep_trailing_newline=False +# ) +# ) +# ]) +# def test_job_submit_jinja_template(ansible_zos_module, args): +# try: +# hosts = ansible_zos_module + +# tmp_file = tempfile.NamedTemporaryFile(delete=False) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_TEMPLATES[args["template"]]) + +# template_vars = dict( +# pgm_name="HELLO", +# input_dataset="DUMMY", +# message="Hello, world", +# steps=[ +# dict(step_name="IN", dd="DUMMY"), +# dict(step_name="PRINT", dd="SYSOUT=*"), +# dict(step_name="UT1", dd="*") +# ] +# ) +# for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): +# host.vars.update(template_vars) + +# results = hosts.all.zos_job_submit( +# src=tmp_file.name, +# location="LOCAL", +# use_template=True, +# template_parameters=args["options"] +# ) + +# for result in results.contacted.values(): +# assert result.get('changed') is True +# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" +# assert result.get("jobs")[0].get("ret_code").get("code") == 0 + +# finally: +# os.remove(tmp_file.name) def test_job_submit_full_input(ansible_zos_module): @@ -635,7 +668,12 @@ def test_job_submit_full_input(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FULL_INPUT), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + src="{0}/SAMPLE".format(TEMP_PATH), + location="USS", + volume=None, + # This job used to set wait=True, but since it has been deprecated + # and removed, it now waits up to 30 seconds. 
+ wait_time_s=30 ) for result in results.contacted.values(): print(result) @@ -645,45 +683,66 @@ def test_job_submit_full_input(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") -def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_NO_DSN) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'completion code', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_NO_DSN) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'completion code', repr(result.get("msg"))) +# assert result.get("jobs")[0].get("job_id") is not None +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. 
+""" # Should have a JCL ERROR <int> -def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_INVALID_USER) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error SEC', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) - - -def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error ? ?', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None - assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" +# def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_INVALID_USER) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." 
+# assert result.get("changed") is False +# assert re.search(r'return code was not available', repr(result.get("msg"))) +# assert re.search(r'error SEC', repr(result.get("msg"))) +# assert result.get("jobs")[0].get("job_id") is not None +# assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + + +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'return code was not available', repr(result.get("msg"))) +# assert re.search(r'error ? ?', repr(result.get("msg"))) +# assert result.get("jobs")[0].get("job_id") is not None +# assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" 
From 6bc8161e1c9309dfbddd84728649ec93252cfa1b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 12 Feb 2024 17:06:11 -0600 Subject: [PATCH 298/413] [v1.10.0][zos_job_query] Removed wait argument from test cases (#1217) * Removed wait arg from test cases * Added changelog --- changelogs/fragments/1217-validate-job-query.yml | 3 +++ tests/functional/modules/test_zos_job_query_func.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1217-validate-job-query.yml diff --git a/changelogs/fragments/1217-validate-job-query.yml b/changelogs/fragments/1217-validate-job-query.yml new file mode 100644 index 000000000..df97c3ca6 --- /dev/null +++ b/changelogs/fragments/1217-validate-job-query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_query - Removed zos_job_submit wait argument from tests. + (https://github.com/ansible-collections/ibm_zos_core/pull/1217). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index c0dc5bdca..8c1f170ed 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -63,7 +63,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -96,7 +96,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait=True + 
src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" From 658cefd54098df1f892a08a71ac09dc6a8111350 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 13 Feb 2024 14:47:09 -0600 Subject: [PATCH 299/413] [v1.10.0] [ zos_job_submit] Bugfix issue for zos_job_submit when location is not set (#1220) * Pulled v1.9.0 changes * Removed changelog * Updated changelog * Removed summary * Removed wait option --- ...ort_job_name_sends_back_a_value_error.yaml | 11 -------- ...20-bugfix-zos_job_submit-default_value.yml | 4 +++ plugins/action/zos_job_submit.py | 2 +- .../modules/test_zos_job_submit_func.py | 27 ++++++++++++++++--- 4 files changed, 28 insertions(+), 16 deletions(-) delete mode 100644 changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml create mode 100644 changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml diff --git a/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml b/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml deleted file mode 100644 index dd9dc98a5..000000000 --- a/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml +++ /dev/null @@ -1,11 +0,0 @@ -bugfixes: - - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. - Change now allows the use of a shorter job ID or name, as well as wildcards. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. - Change now allows the use of a shorter job ID or name, as well as wildcards. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). 
- -minor_change: - - zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). \ No newline at end of file diff --git a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml new file mode 100644 index 000000000..83d2391ba --- /dev/null +++ b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. + (https://github.com/ansible-collections/ibm_zos_core/pull/1220). \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 4b0245b37..6bbd0f9d9 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -52,7 +52,7 @@ def run(self, tmp=None, task_vars=None): )) return result - if module_args["location"] == "LOCAL": + if location == "LOCAL": source = self._task.args.get("src", None) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index cdd7175d2..451f79c83 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -262,8 +262,21 @@ TEMP_PATH = "/tmp/jcl" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" -def test_job_submit_PDS(ansible_zos_module): +@pytest.mark.parametrize( + "location", [ + dict(default_location=True), + dict(default_location=False), + ] +) +def test_job_submit_PDS(ansible_zos_module, location): + """ + Test zos_job_submit with a PDS(MEMBER), also test the default + value for 'location', ensure it works with and without the + value "DATA_SET". 
If default_location is True, then don't + pass a 'location:DATA_SET' allow its default to come through. + """ try: + results = None hosts = ansible_zos_module data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") @@ -276,9 +289,15 @@ def test_job_submit_PDS(ansible_zos_module): hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) - results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET" - ) + if bool(location.get("default_location")): + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(data_set_name), wait_time_s=30 + ) + else: + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait_time_s=30 + ) + for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 From 05b53c4f2a3d87f71c2055c74e80ce8102c1b174 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 13 Feb 2024 14:47:50 -0600 Subject: [PATCH 300/413] [v1.10.0][zos_encode] Migrate zos_encode to use ZOAU 1.3 (#1218) * Modified zos_encode and remvoved wwait option for zos_job_submit * Updated parameter * fixed dsorg * Fixed copyright years * Added changelog --- changelogs/fragments/1218-migrate-zos_encode.yml | 3 +++ plugins/module_utils/data_set.py | 4 ++-- plugins/modules/zos_encode.py | 7 ++++--- tests/functional/modules/test_zos_encode_func.py | 10 +++++----- 4 files changed, 14 insertions(+), 10 deletions(-) create mode 100644 changelogs/fragments/1218-migrate-zos_encode.yml diff --git a/changelogs/fragments/1218-migrate-zos_encode.yml b/changelogs/fragments/1218-migrate-zos_encode.yml new file mode 100644 index 000000000..3d712b749 --- /dev/null +++ b/changelogs/fragments/1218-migrate-zos_encode.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - migrate code to use ZOAU v1.3.0. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1218). diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 8b02d77f4..33b1958b4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -543,10 +543,10 @@ def data_set_type(name, volume=None): data_sets_found = datasets.list_datasets(name) - # Using the DSORG property when it's a sequential or partitioned + # Using the organization property when it's a sequential or partitioned # dataset. VSAMs are not found by datasets.list_datasets. if len(data_sets_found) > 0: - return data_sets_found[0].dsorg + return data_sets_found[0].organization # Next, trying to get the DATA information of a VSAM through # LISTCAT. diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 2628ab174..1adc08c01 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -273,7 +273,7 @@ sample: /path/file_name.2020-04-23-08-32-29-bak.tar """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, @@ -286,11 +286,12 @@ from os import makedirs from os import listdir import re +import traceback try: from zoautil_py import datasets except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) def check_pds_member(ds, mem): diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 5d58f2435..e017450ff 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -545,7 +545,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) for result in results.contacted.values(): @@ -671,7 +671,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -803,7 +803,7 @@ def test_vsam_backup(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) hosts.all.file(path=TEMP_JCL_PATH, state="absent") # submit JCL to populate KSDS @@ -814,7 +814,7 @@ def test_vsam_backup(ansible_zos_module): ) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) hosts.all.zos_encode( From 3849a210f0614fbad30a424107055c198345b3c5 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 15 Feb 2024 08:28:54 -0800 Subject: [PATCH 301/413] Enabler updates the AC tool with operator support for python and changes python search order. 
(#1224) * Update requirement files to use new operator for python versions Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac to correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * Update script to support requirements operator for python and change python search order Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright year Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 3 +- scripts/requirements-2.11.env | 2 +- scripts/requirements-2.12.env | 2 +- scripts/requirements-2.13.env | 2 +- scripts/requirements-2.14.env | 2 +- scripts/requirements-2.15.env | 2 +- scripts/requirements-2.16.env | 32 ++++++++++++++ scripts/requirements-2.9.env | 2 +- scripts/venv.sh | 83 +++++++++++++++++++++++++++-------- 9 files changed, 104 insertions(+), 26 deletions(-) create mode 100644 scripts/requirements-2.16.env diff --git a/ac b/ac index b5febedbb..dad00194c 100755 --- a/ac +++ b/ac @@ -584,7 +584,7 @@ host_nodes(){ ## the 'password' option should only an option when the utility can not decrypt. ## Usage: ac [--venv-setup] [--password 123456] ## Example: -## $ ac --venv-setup --passsword 123456 +## $ ac --venv-setup --password 123456 ## $ ac --venv-setup venv_setup(){ option_pass=$1 @@ -636,7 +636,6 @@ while true; do exit 1 fi fi - case $1 in -h|-\?|--help) if [ "$1" = "-h" ] || [ "$1" = "-?" 
]; then diff --git a/scripts/requirements-2.11.env b/scripts/requirements-2.11.env index e7defb9fc..3b4a18d0c 100644 --- a/scripts/requirements-2.11.env +++ b/scripts/requirements-2.11.env @@ -31,5 +31,5 @@ requirements=( ) python=( -"python:3.8" +"<=:python:3.9" ) \ No newline at end of file diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env index 5052447da..4f6add957 100644 --- a/scripts/requirements-2.12.env +++ b/scripts/requirements-2.12.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.8" +"<=:python:3.10" ) \ No newline at end of file diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env index c08a7c7e9..cfce646d0 100644 --- a/scripts/requirements-2.13.env +++ b/scripts/requirements-2.13.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.8" +"<=:python:3.10" ) \ No newline at end of file diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env index 9d15b3dab..f1c423f8b 100644 --- a/scripts/requirements-2.14.env +++ b/scripts/requirements-2.14.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.9" +"<=:python:3.11" ) \ No newline at end of file diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env index 5f8b36260..3d94e55af 100644 --- a/scripts/requirements-2.15.env +++ b/scripts/requirements-2.15.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.9" +"<=:python:3.11" ) diff --git a/scripts/requirements-2.16.env b/scripts/requirements-2.16.env new file mode 100644 index 000000000..2d0ed42a1 --- /dev/null +++ b/scripts/requirements-2.16.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.16.3" +"pylint" +"rstcheck" +) + +python=( +"<=:python:3.12" +) diff --git a/scripts/requirements-2.9.env b/scripts/requirements-2.9.env index 2d7d9e11b..b962483f9 100644 --- a/scripts/requirements-2.9.env +++ b/scripts/requirements-2.9.env @@ -30,6 +30,6 @@ requirements=( ) python=( -"python:3.8" +"==:python:3.8" ) diff --git a/scripts/venv.sh b/scripts/venv.sh index 51426a055..56756d16e 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -27,6 +27,13 @@ VENV_HOME_MANAGED=${PWD%/*}/venv # Array where each entry is: "<index>:<version>:<mount>:<data_set>" HOSTS_ALL="" +OPER_EQ="==" +OPER_NE="!=" +OPER_LT="<" +OPER_LE="<=" +OPER_GT=">" +OPER_GE=">=" + # hosts_env="hosts.env" # if [ -f "$hosts_env" ]; then @@ -128,9 +135,9 @@ echo_requirements(){ py_req="0" for ver in "${python[@]}" ; do - key=${ver%%:*} - value=${ver#*:} - py_req="${value}" + py_op=`echo "${ver}" | cut 
-d ":" -f 1` + py_name=`echo "${ver}" | cut -d ":" -f 2` + py_req=`echo "${ver}" | cut -d ":" -f 3` done echo "${py_req}" done @@ -222,13 +229,29 @@ write_requirements(){ py_req="0" for ver in "${python[@]}" ; do - key=${ver%%:*} - value=${ver#*:} - py_req="${value}" + py_op=`echo "${ver}" | cut -d ":" -f 1` + py_name=`echo "${ver}" | cut -d ":" -f 2` + py_req=`echo "${ver}" | cut -d ":" -f 3` done + if [ "$OPER_EQ" == "$py_op" ];then + py_op="-eq" + elif [ "$OPER_NE" == "$py_op" ];then + py_op="-ne" + elif [ "$OPER_LT" == "$py_op" ];then + py_op="-lt" + elif [ "$OPER_LE" == "$py_op" ];then + py_op="-le" + elif [ "$OPER_GT" == "$py_op" ];then + py_op="-gt" + elif [ "$OPER_GE" == "$py_op" ];then + py_op="-ge" + fi + + discover_python $py_op $py_req + # Is the discoverd python >= what the requirements.txt requires? - if [ $(normalize_version $VERSION_PYTHON) -ge $(normalize_version $py_req) ]; then + if [ $(normalize_version $VERSION_PYTHON) "$py_op" $(normalize_version $py_req) ]; then echo "${REQ}${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt cp mounts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ #cp info.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ @@ -245,6 +268,16 @@ write_requirements(){ chmod 700 "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env #echo "${option_pass}" | openssl bf -d -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin + else + # echo a stub so the user can later choose to rename and configure + touch "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "# This configuration file is used by the tool to avoid exporting enviroment variables">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "# To use this, update all the variables with a value and rename the file to 'info.env'.">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo 
"USER=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "PASS=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "HOST_SUFFIX=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "SSH_KEY_PIPELINE=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "No password was provided, a temporary 'info.env.changeme' file has been created for your convenience." fi else echo "Not able to create managed venv path: ${VENV_HOME_MANAGED}/${venv_name} , min python required is ${py_req}, found version $VERSION_PYTHON" @@ -282,36 +315,50 @@ find_in_path() { result="" OTHER_PYTHON_PATHS="/Library/Frameworks/Python.framework/Versions/Current/bin:/opt/homebrew/bin:" PATH="${OTHER_PYTHON_PATHS}${PATH}" + OLDIFS=$IFS IFS=: for x in $PATH; do if [ -x "$x/$1" ]; then result=${result}" $x/$1" fi done + IFS=$OLDIFS echo $result } - # Find the most recent python in a users path discover_python(){ - # Don't use which, it only will find first in path within script + operator=$1 + required_python=$2 + if [ ! "$operator" ]; then + operator="-ge" + fi + + if [ "$required_python" ]; then + VERSION_PYTHON=$required_python + fi + + # Don't use which, it only will find first in path within the script # for python_found in `which python3 | cut -d" " -f3`; do - pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11") # "python3.12" "python3.13" "python3.14") - #pys=("python3.8" "python3.9") + pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8") + rc=1 for py in "${pys[@]}"; do for python_found in `find_in_path $py`; do ver=`${python_found} --version | cut -d" " -f2` + rc=$? + ver=`echo $ver |cut -d"." 
-f1,2` ver_path="$python_found" echo "Found $ver_path" done - - if [ $(normalize_version $ver) -ge $(normalize_version $VERSION_PYTHON) ]; then - VERSION_PYTHON="$ver" - VERSION_PYTHON_PATH="$ver_path" + if [ $rc -eq 0 ];then + if [ $(normalize_version $ver) "$operator" $(normalize_version $VERSION_PYTHON) ]; then + VERSION_PYTHON="$ver" + VERSION_PYTHON_PATH="$ver_path" + break + fi fi - done echo ${DIVIDER} @@ -406,7 +453,7 @@ set_hosts_to_array(){ else # check if the env varas instead have been exported if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported." - echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env." + echo "Export and set vars: 'USER', 'PASS','HOST_SUFFIX' and optionally 'SSH_KEY_PIPELINE', or place them in a file named info.env." exit 1 fi fi @@ -566,7 +613,7 @@ case "$1" in discover_python ;; --vsetup) - discover_python + #discover_python make_venv_dirs #echo_requirements write_requirements $3 From a8d2fc227c66020cf47cd13b3edc862f20741453 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 15 Feb 2024 12:03:13 -0600 Subject: [PATCH 302/413] [v1.10.0][zos_apf] Migrate zos_apf to ZOAU 1.3 (#1204) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Modified test case and apf code * Updated exceptions * Updated changelog * Changed zos_apf exit call * Uncommented test case * Added try - finally statements to make sure we clean up the environment upon failure * Removed validation & fixed test case * Removed zos_data_set dependency * updated test case * Fixed typo * Added validation for volume * Updated changelog * Modified tests to only fetch volumes with VVDS once per session * Updated zos_apf tests --------- Co-authored-by: André Marcel Gutiérrez Benítez
<68956970+AndreMarcel99@users.noreply.github.com> --- changelogs/fragments/1204-migrate-zos_apf.yml | 12 + plugins/modules/zos_apf.py | 29 +- tests/conftest.py | 14 +- tests/functional/modules/test_zos_apf_func.py | 811 +++++++++--------- tests/helpers/volumes.py | 44 +- 5 files changed, 517 insertions(+), 393 deletions(-) create mode 100644 changelogs/fragments/1204-migrate-zos_apf.yml diff --git a/changelogs/fragments/1204-migrate-zos_apf.yml b/changelogs/fragments/1204-migrate-zos_apf.yml new file mode 100644 index 000000000..89db1abd2 --- /dev/null +++ b/changelogs/fragments/1204-migrate-zos_apf.yml @@ -0,0 +1,12 @@ +bugfixes: + - zos_apf - List option only returned one data set. Fix now returns + the list of retrieved data sets. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + +minor_changes: + - zos_apf - Enhanced error messages when an exception is caught. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + +trivial: + - zos_apf - Migrated the module to use ZOAU v1.3.0 json schema. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). \ No newline at end of file diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index dee6094fc..bba3beb19 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -292,6 +292,7 @@ import re import json +from ansible.module_utils._text import to_text from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) @@ -522,6 +523,12 @@ def main(): result['rc'] = operRc result['stdout'] = operOut if operation == 'list': + try: + data = json.loads(operOut) + data_sets = data["data"]["datasets"] + except Exception as e: + err_msg = "An exception occurred. See stderr for more details." + module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr) if not library: library = "" if not volume: @@ -529,18 +536,26 @@ def main(): if sms: sms = "*SMS*" if library or volume or sms: - try: - data = json.loads(operOut) - except json.JSONDecodeError: - module.exit_json(**result) - for d in data[2:]: + ds_list = "" + for d in data_sets: ds = d.get('ds') vol = d.get('vol') try: if (library and re.match(library, ds)) or (volume and re.match(volume, vol)) or (sms and sms == vol): - result['stdout'] = "{0} {1}\n".format(vol, ds) + ds_list = ds_list + "{0} {1}\n".format(vol, ds) except re.error: module.exit_json(**result) + result['stdout'] = ds_list + else: + """ + ZOAU 1.3 changed the output from apf, having the data set list inside a new "data" tag. + To keep consistency with previous ZOAU versions now we have to filter the json response. + """ + try: + result['stdout'] = json.dumps(data.get("data")) + except Exception as e: + err_msg = "An exception occurred. See stderr for more details." 
+ module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr) module.exit_json(**result) diff --git a/tests/conftest.py b/tests/conftest.py index c8513ad37..7fea5ac0d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,7 @@ __metaclass__ = type import pytest from ibm_zos_core.tests.helpers.ztest import ZTestHelper -from ibm_zos_core.tests.helpers.volumes import get_volumes +from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds import sys from mock import MagicMock import importlib @@ -93,6 +93,18 @@ def volumes_on_systems(ansible_zos_module, request): list_Volumes = get_volumes(ansible_zos_module, path) yield list_Volumes + +@pytest.fixture(scope="session") +def volumes_with_vvds(ansible_zos_module, request): + """ Return a list of volumes that have a VVDS. If no volume has a VVDS + then it will try to create one for each volume found and return volumes only + if a VVDS was successfully created for it.""" + path = request.config.getoption("--zinventory") + volumes = get_volumes(ansible_zos_module, path) + volumes_with_vvds = get_volumes_with_vvds(ansible_zos_module, volumes) + yield volumes_with_vvds + + # * We no longer edit sys.modules directly to add zoautil_py mock # * because automatic teardown is not performed, leading to mock pollution # * across test files. diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 3c3d96ab2..9722b92fa 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -16,9 +16,6 @@ from ibm_zos_core.tests.helpers.volumes import Volume_Handler from shellescape import quote from pprint import pprint -import os -import sys -import pytest __metaclass__ = type @@ -53,103 +50,116 @@ def clean_test_env(hosts, test_info): hosts.all.shell(cmd=cmdStr) -def test_add_del(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_add_del(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for 
result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - clean_test_env(hosts, test_info) - - -def test_add_del_with_tmp_hlq_option(ansible_zos_module): - hosts = ansible_zos_module - tmphlq = "TMPHLQ" - test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) - test_info['tmp_hlq'] = tmphlq - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + finally: + clean_test_env(hosts, test_info) + + +def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + tmphlq = "TMPHLQ" + test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) + test_info['tmp_hlq'] = tmphlq + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in 
results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + assert result.get("backup_name")[:6] == tmphlq + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - assert result.get("backup_name")[:6] == tmphlq - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - clean_test_env(hosts, test_info) - - -def test_add_del_volume(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", volume="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + finally: + clean_test_env(hosts, test_info) + + +def test_add_del_volume(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", volume="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + 
hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - clean_test_env(hosts, test_info) + assert result.get("rc") == 0 + finally: + clean_test_env(hosts, test_info) """ +This test case was removed 3 years ago in the following PR : https://github.com/ansible-collections/ibm_zos_core/pull/197 def test_add_del_persist(ansible_zos_module): hosts = ansible_zos_module test_info = TEST_INFO['test_add_del_persist'] @@ -176,50 +186,54 @@ def test_add_del_persist(ansible_zos_module): """ -def test_add_del_volume_persist(ansible_zos_module): - hosts = 
ansible_zos_module - test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " +def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) + for result in results.contacted.values(): + assert result.get("rc") == 0 + add_exptd = add_expected.format(test_info['library'], test_info['volume']) + add_exptd = add_exptd.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) results = hosts.all.shell(cmd=cmdStr) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == 
add_exptd + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) + for result in results.contacted.values(): + assert result.get("rc") == 0 + del_exptd = del_expected.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) results = hosts.all.shell(cmd=cmdStr) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - add_exptd = add_expected.format(test_info['library'], test_info['volume']) - add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - actual = result.get("stdout") - actual = actual.replace(" ", "") - assert actual == add_exptd - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - actual = result.get("stdout") - actual = actual.replace(" ", "") - assert actual == del_exptd - clean_test_env(hosts, test_info) + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == del_exptd + finally: + clean_test_env(hosts, test_info) """ keyword: ENABLE-FOR-1-3 @@ -227,56 +241,58 @@ def test_add_del_volume_persist(ansible_zos_module): whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 should uncomment this test as part of the validation process. 
""" -#def test_batch_add_del(ansible_zos_module): -# hosts = ansible_zos_module -# test_info = dict( -# batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], -# persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True -# ) -# for item in test_info['batch']: -# ds = get_tmp_ds_name(1,1) -# hosts.all.shell(cmd="dtouch {0}".format(ds)) -# item['library'] = ds -# cmdStr = "dls -l " + ds + " | awk '{print $5}' " -# results = hosts.all.shell(cmd=cmdStr) -# for result in results.contacted.values(): -# vol = result.get("stdout") -# item['volume'] = vol -# prstds = get_tmp_ds_name(5,5) -# cmdStr = "dtouch {0}".format(prstds) -# hosts.all.shell(cmd=cmdStr) -# test_info['persistent']['data_set_name'] = prstds -# hosts.all.shell(cmd="echo \"{0}\" > {1}".format("Hello World, Here's Jhonny", prstds)) -# results = hosts.all.zos_apf(**test_info) -# pprint(vars(results)) -# for result in results.contacted.values(): -# assert result.get("rc") == 0 -# add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], -# test_info['batch'][1]['library'], test_info['batch'][1]['volume'], -# test_info['batch'][2]['library'], test_info['batch'][2]['volume']) -# add_exptd = add_exptd.replace(" ", "") -# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) -# results = hosts.all.shell(cmd=cmdStr) -# for result in results.contacted.values(): -# actual = result.get("stdout") -# actual = actual.replace(" ", "") -# assert actual == add_exptd -# test_info['state'] = 'absent' -# results = hosts.all.zos_apf(**test_info) -# pprint(vars(results)) -# for result in results.contacted.values(): -# assert result.get("rc") == 0 -# del_exptd = del_expected.replace(" ", "") -# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) -# results = hosts.all.shell(cmd=cmdStr) -# for result in results.contacted.values(): -# actual = 
result.get("stdout") -# actual = actual.replace(" ", "") -# assert actual == del_exptd -# for item in test_info['batch']: -# clean_test_env(hosts, item) -# cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) -# hosts.all.shell(cmd=cmdStr) +def test_batch_add_del(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict( + batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], + persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True + ) + for item in test_info['batch']: + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + item['library'] = ds + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + item['volume'] = vol + prstds = get_tmp_ds_name(5,5) + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("rc") == 0 + add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], + test_info['batch'][1]['library'], test_info['batch'][1]['volume'], + test_info['batch'][2]['library'], test_info['batch'][2]['volume']) + add_exptd = add_exptd.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == add_exptd + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) + pprint(vars(results)) + for result in results.contacted.values(): + 
assert result.get("rc") == 0 + del_exptd = del_expected.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == del_exptd + finally: + for item in test_info['batch']: + clean_test_env(hosts, item) + hosts.all.shell(cmd="drm {0}".format(test_info['persistent']['data_set_name'])) def test_operation_list(ansible_zos_module): @@ -285,111 +301,124 @@ def test_operation_list(ansible_zos_module): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): listJson = result.get("stdout") + print(listJson) import json data = json.loads(listJson) - assert data[0]['format'] in ['DYNAMIC', 'STATIC'] + assert data['format'] in ['DYNAMIC', 'STATIC'] del json -def test_operation_list_with_filter(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if 
test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + hosts.all.zos_apf(**test_info) + ti = dict(operation="list", library="") + ti['library'] = "ANSIBLE.*" + results = hosts.all.zos_apf(**ti) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - hosts.all.zos_apf(**test_info) - ti = dict(operation="list", library="") - ti['library'] = "APFTEST.*" - results = hosts.all.zos_apf(**ti) - for result in results.contacted.values(): - listFiltered = result.get("stdout") - assert test_info['library'] in listFiltered - test_info['state'] = 'absent' - hosts.all.zos_apf(**test_info) - clean_test_env(hosts, test_info) + listFiltered = result.get("stdout") + assert test_info['library'] in listFiltered + test_info['state'] = 'absent' + hosts.all.zos_apf(**test_info) + finally: + clean_test_env(hosts, test_info) # # Negative tests # -def test_add_already_present(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_add_already_present(ansible_zos_module, volumes_with_vvds): + try: + hosts = 
ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - assert result.get("rc") == 16 or result.get("rc") == 8 - test_info['state'] = 'absent' - hosts.all.zos_apf(**test_info) - clean_test_env(hosts, test_info) - - -def test_del_not_present(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", 
force_dynamic=True) - ds = get_tmp_ds_name(1,1) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("rc") == 16 or result.get("rc") == 8 + test_info['state'] = 'absent' + hosts.all.zos_apf(**test_info) + finally: + clean_test_env(hosts, test_info) + + +def test_del_not_present(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['state'] = 'absent' - results = 
hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - assert result.get("rc") == 16 or result.get("rc") == 8 - clean_test_env(hosts, test_info) + # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("rc") == 16 or result.get("rc") == 8 + finally: + clean_test_env(hosts, test_info) def test_add_not_found(ansible_zos_module): @@ -402,119 +431,135 @@ def test_add_not_found(ansible_zos_module): assert result.get("rc") == 16 or result.get("rc") == 8 -def test_add_with_wrong_volume(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", volume="", state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", volume="", state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + 
test_info['volume'] = 'T12345' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) + # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("rc") == 16 or result.get("rc") == 8 + finally: + clean_test_env(hosts, test_info) + + +def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['volume'] = 'T12345' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - # Return 
code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - assert result.get("rc") == 16 or result.get("rc") == 8 - clean_test_env(hosts, test_info) - - -def test_persist_invalid_ds_format(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) - hosts.all.shell(cmd=cmdStr) - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 8 - clean_test_env(hosts, test_info) - - -def test_persist_invalid_marker(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if 
test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['persistent']['marker'] = "# Invalid marker format" - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 4 - clean_test_env(hosts, test_info) - - -def test_persist_invalid_marker_len(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 8 + finally: + clean_test_env(hosts, test_info) + + +def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for 
result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['marker'] = "# Invalid marker format" + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 4 + finally: + clean_test_env(hosts, test_info) + + +def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - 
test_info['persistent']['data_set_name'] = prstds - test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("msg") == 'marker length may not exceed 72 characters' - clean_test_env(hosts, test_info) \ No newline at end of file + assert result.get("msg") == 'marker length may not exceed 72 characters' + finally: + clean_test_env(hosts, test_info) \ No newline at end of file diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py index b0ed97d30..952cbb1e7 100644 --- a/tests/helpers/volumes.py +++ b/tests/helpers/volumes.py @@ -18,7 +18,7 @@ import pytest import time import yaml - +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name class Volume: """ Volume class represents a volume on the z system, it tracks if the volume name and status of the volume with respect to the current test session.""" @@ -118,4 +118,44 @@ def read_test_config(path): if len(config["VOLUMES"]) > 0: return config["VOLUMES"] else: - return None \ No newline at end of file + return None + +def get_volumes_with_vvds( ansible_zos_module, volumes_on_system): + """ + Get a list of volumes that contain a VVDS, if no volume has a VVDS then + creates one on any volume. + """ + volumes_with_vvds = find_volumes_with_vvds(ansible_zos_module, volumes_on_system) + if len(volumes_with_vvds) == 0 and len(volumes_on_system) > 0: + volumes_with_vvds = list() + for volume in volumes_on_system: + if create_vvds_on_volume(ansible_zos_module, volume): + volumes_with_vvds.append(volume) + return volumes_with_vvds + +def find_volumes_with_vvds( ansible_zos_module, volumes_on_system): + """ + Fetches all VVDS in the system and returns a list of volumes for + which there are VVDS. 
+ """ + hosts = ansible_zos_module + vls_result = hosts.all.shell(cmd="vls SYS1.VVDS.*") + for vls_res in vls_result.contacted.values(): + vvds_list = vls_res.get("stdout") + return [volume for volume in volumes_on_system if volume in vvds_list] + +def create_vvds_on_volume( ansible_zos_module, volume): + """ + Creates a vvds on a volume by allocating a small VSAM and then deleting it. + """ + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(mlq_size=7, llq_size=7) + hosts.all.shell(cmd=f"dtouch -tesds -s10K -V{volume} {data_set_name}") + # Remove that dataset + hosts.all.shell(cmd=f"drm {data_set_name}") + # Verify that the VVDS is in place + vls_result = hosts.all.shell(cmd=f"vls SYS1.VVDS.V{volume} ") + for vls_res in vls_result.contacted.values(): + if vls_res.get("rc") == 0: + return True + return False \ No newline at end of file From dc4fdc3b139f88b568c2866c1792a9052557c177 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 20 Feb 2024 13:23:38 -0700 Subject: [PATCH 303/413] [v1.10.0] [zos_copy] zos_copy migration to ZOAU v1.3.0 (#1222) * Update calls to datasets API * Update tests * Update zos_job_submit tests that depend on zos_copy * Add changelog fragment * Enable tests that depend on zos_encode * Remove calls to datasets._copy * Fixed pep8 issue * Fixed bug when copying from MVS to USS --- .../1222-zoau-migration-zos_copy.yml | 3 + plugins/module_utils/job.py | 12 +- plugins/modules/zos_copy.py | 123 +++-- .../functional/modules/test_zos_copy_func.py | 8 +- .../modules/test_zos_job_submit_func.py | 427 ++++++++---------- 5 files changed, 286 insertions(+), 287 deletions(-) create mode 100644 changelogs/fragments/1222-zoau-migration-zos_copy.yml diff --git a/changelogs/fragments/1222-zoau-migration-zos_copy.yml b/changelogs/fragments/1222-zoau-migration-zos_copy.yml new file mode 100644 index 000000000..edc6eec06 --- /dev/null +++ b/changelogs/fragments/1222-zoau-migration-zos_copy.yml @@ -0,0 +1,3 @@ +trivial: + - 
zos_copy - Migrated the module to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1222). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index a854a0a1e..4a432d764 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -273,13 +273,21 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["owner"] = entry.owner job["ret_code"] = dict() - job["ret_code"]["msg"] = "{0} {1}".format(entry.status, entry.return_code) + + # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'. + # This new way of constructing msg allows for a better empty message. + # "" instead of "None None". + job["ret_code"]["msg"] = "{0} {1}".format( + entry.status if entry.status else "", + entry.return_code if entry.return_code else "" + ).strip() + job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None if entry.return_code and len(entry.return_code) > 0: if entry.return_code.isdigit(): job["ret_code"]["code"] = int(entry.return_code) - job["ret_code"]["msg_text"] = entry.status + job["ret_code"]["msg_text"] = entry.status if entry.status else "?" # Beginning in ZOAU v1.3.0, the Job class changes svc_class to # service_class. 
diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index e07b44a97..a854d1cae 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -804,7 +804,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( idcams @@ -829,6 +829,7 @@ import math import tempfile import os +import traceback if PY3: from re import fullmatch @@ -839,7 +840,13 @@ try: from zoautil_py import datasets, opercmd except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + opercmd = ZOAUImportError(traceback.format_exc()) + +try: + from zoautil_py import exceptions as zoau_exceptions +except ImportError: + zoau_exceptions = ZOAUImportError(traceback.format_exc()) class CopyHandler(object): @@ -909,6 +916,14 @@ def copy_to_seq( if src_type == 'USS' and self.asa_text: response = copy.copy_asa_uss2mvs(new_src, dest) + + if response.rc != 0: + raise CopyOperationError( + msg="Unable to copy source {0} to {1}".format(new_src, dest), + rc=response.rc, + stdout=response.stdout_response, + stderr=response.stderr_response + ) else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. 
@@ -918,14 +933,15 @@ def copy_to_seq( if self.force_lock: copy_args["options"] += " -f" - response = datasets._copy(new_src, dest, None, **copy_args) - if response.rc != 0: - raise CopyOperationError( - msg="Unable to copy source {0} to {1}".format(new_src, dest), - rc=response.rc, - stdout=response.stdout_response, - stderr=response.stderr_response - ) + try: + datasets.copy(new_src, dest, **copy_args) + except zoau_exceptions.ZOAUException as copy_exception: + raise CopyOperationError( + msg="Unable to copy source {0} to {1}".format(new_src, dest), + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response + ) def copy_to_vsam(self, src, dest): """Copy source VSAM to destination VSAM. @@ -988,9 +1004,11 @@ def _copy_tree(self, entries, src, dest, dirs_exist_ok=False): else: opts = dict() opts["options"] = "" - response = datasets._copy(src_name, dest_name, None, **opts) - if response.rc > 0: - raise Exception(response.stderr_response) + + try: + datasets.copy(src_name, dest_name, **opts) + except zoau_exceptions.ZOAUException as copy_exception: + raise Exception(copy_exception.response.stderr_response) shutil.copystat(src_name, dest_name, follow_symlinks=True) except Exception as err: raise err @@ -1356,14 +1374,17 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): else: opts = dict() opts["options"] = "" - response = datasets._copy(new_src, dest, None, **opts) - if response.rc > 0: - raise Exception(response.stderr_response) + datasets.copy(new_src, dest, **opts) shutil.copystat(new_src, dest, follow_symlinks=True) # shutil.copy(new_src, dest) if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) + except zoau_exceptions.ZOAUException as err: + raise CopyOperationError( + msg="Unable to copy file {0} to {1}".format(new_src, dest), + stderr=err.response.stderr_response, + ) except OSError as err: raise CopyOperationError( msg="Destination {0} is 
not writable".format(dest), @@ -1549,12 +1570,21 @@ def _mvs_copy_to_uss( if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: if self.asa_text: response = copy.copy_asa_mvs2uss(src, dest) + rc = response.rc elif self.executable: - response = datasets._copy(src, dest, alias=True, executable=True) + try: + rc = datasets.copy(src, dest, alias=True, executable=True) + except zoau_exceptions.ZOAUException as copy_exception: + response = copy_exception.response + rc = response.rc else: - response = datasets._copy(src, dest) + try: + rc = datasets.copy(src, dest) + except zoau_exceptions.ZOAUException as copy_exception: + response = copy_exception.response + rc = response.rc - if response.rc != 0: + if rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), rc=response.rc, @@ -1563,14 +1593,14 @@ def _mvs_copy_to_uss( ) else: if self.executable: - response = datasets._copy(src, dest, None, alias=True, executable=True) - - if response.rc != 0: + try: + datasets.copy(src, dest, alias=True, executable=True) + except zoau_exceptions.ZOAUException as copy_exception: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), - rc=response.rc, - stdout=response.stdout_response, - stderr=response.stderr_response + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response ) elif self.asa_text: response = copy.copy_asa_pds2uss(src, dest) @@ -1785,6 +1815,7 @@ def copy_to_member( if src_type == 'USS' and self.asa_text: response = copy.copy_asa_uss2mvs(src, dest) + rc, out, err = response.rc, response.stdout_response, response.stderr_response else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. 
@@ -1794,8 +1825,14 @@ def copy_to_member( if self.force_lock: opts["options"] += " -f" - response = datasets._copy(src, dest, alias=self.aliases, executable=self.executable, **opts) - rc, out, err = response.rc, response.stdout_response, response.stderr_response + try: + rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, **opts) + out = "" + err = "" + except zoau_exceptions.ZOAUException as copy_exception: + rc = copy_exception.response.rc + out = copy_exception.response.stdout_response + err = copy_exception.response.stderr_response return dict( rc=rc, @@ -1852,8 +1889,8 @@ def dump_data_set_member_to_file(data_set_member, is_binary): if is_binary: copy_args["options"] = "-B" - response = datasets._copy(data_set_member, temp_path, None, **copy_args) - if response.rc != 0 or response.stderr_response: + response = datasets.copy(data_set_member, temp_path, **copy_args) + if response != 0: raise DataSetMemberAttributeError(data_set_member) return temp_path @@ -2315,7 +2352,7 @@ def get_attributes_of_any_dataset_created( volume=volume ) else: - src_attributes = datasets.listing(src_name)[0] + src_attributes = datasets.list_datasets(src_name)[0] size = int(src_attributes.total_space) params = get_data_set_attributes( dest, @@ -2397,8 +2434,8 @@ def allocate_destination_data_set( try: # Dumping the member into a file in USS to compute the record length and # size for the new data set. 
- src_attributes = datasets.listing(src_name)[0] - record_length = int(src_attributes.lrecl) + src_attributes = datasets.list_datasets(src_name)[0] + record_length = int(src_attributes.record_length) temp_dump = dump_data_set_member_to_file(src, is_binary) create_seq_dataset_from_file( temp_dump, @@ -2417,11 +2454,11 @@ def allocate_destination_data_set( if src_ds_type in data_set.DataSet.MVS_PARTITIONED: data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: - src_attributes = datasets.listing(src_name)[0] + src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. size = int(src_attributes.total_space) - record_format = src_attributes.recfm - record_length = int(src_attributes.lrecl) + record_format = src_attributes.record_format + record_length = int(src_attributes.record_length) dest_params = get_data_set_attributes( dest, size, @@ -2507,8 +2544,8 @@ def allocate_destination_data_set( asa_text, volume ) - dest_attributes = datasets.listing(dest)[0] - record_format = dest_attributes.recfm + dest_attributes = datasets.list_datasets(dest)[0] + record_format = dest_attributes.record_format dest_params["type"] = dest_ds_type dest_params["record_format"] = record_format return True, dest_params @@ -2730,8 +2767,8 @@ def run_module(module, arg_def): src_ds_type = data_set.DataSet.data_set_type(src_name) if src_ds_type not in data_set.DataSet.MVS_VSAM: - src_attributes = datasets.listing(src_name)[0] - if src_attributes.recfm == 'FBA' or src_attributes.recfm == 'VBA': + src_attributes = datasets.list_datasets(src_name)[0] + if src_attributes.record_format == 'FBA' or src_attributes.record_format == 'VBA': src_has_asa_chars = True else: raise NonExistentSourceError(src) @@ -2785,8 +2822,8 @@ def run_module(module, arg_def): elif not dest_exists and asa_text: dest_has_asa_chars = True elif dest_exists and dest_ds_type 
not in data_set.DataSet.MVS_VSAM: - dest_attributes = datasets.listing(dest_name)[0] - if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_attributes = datasets.list_datasets(dest_name)[0] + if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': dest_has_asa_chars = True if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): @@ -2794,8 +2831,8 @@ def run_module(module, arg_def): elif not dest_exists and asa_text: dest_has_asa_chars = True elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_attributes = datasets.listing(dest_name)[0] - if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_attributes = datasets.list_datasets(dest_name)[0] + if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': dest_has_asa_chars = True if dest_ds_type in data_set.DataSet.MVS_PARTITIONED: diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 1cb3cb7cb..cf7f1494b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -4347,16 +4347,21 @@ def test_backup_pds(ansible_zos_module, args): def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() + source_member = f"{source}(MEM)" dest = get_tmp_ds_name() volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() + if volume_1 == "SCR03": volume = volumes.get_available_vol() volumes.free_vol(volume_1) volume_1 = volume + try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "seq": + hosts.all.zos_data_set(name=source_member, type="member", state='present') + copy_res = hosts.all.zos_copy( src=source, dest=dest, @@ 
-4406,7 +4411,6 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): finally: hosts.all.zos_data_set(name=dest_ds, state="absent") - @pytest.mark.vsam @pytest.mark.parametrize("force", [False, True]) def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 451f79c83..9de3e992a 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -352,64 +352,45 @@ def test_job_submit_USS(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_job_submit_LOCAL(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -# for result in results.contacted.values(): -# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" -# assert result.get("jobs")[0].get("ret_code").get("code") == 0 -# assert result.get("changed") is True +def test_job_submit_LOCAL(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + for result in results.contacted.values(): + print(result) + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True -""" 
-keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_job_submit_LOCAL_extraR(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_BACKSLASH_R) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -# for result in results.contacted.values(): -# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" -# assert result.get("jobs")[0].get("ret_code").get("code") == 0 -# assert result.get("changed") is True +def test_job_submit_LOCAL_extraR(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_BACKSLASH_R) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. 
-""" -# def test_job_submit_LOCAL_BADJCL(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_BAD) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'completion code', repr(result.get("msg"))) +def test_job_submit_LOCAL_BADJCL(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_BAD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'completion code', repr(result.get("msg"))) def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): @@ -546,137 +527,123 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): hosts.all.zos_data_set(name=data_set_name, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. 
-""" -# @pytest.mark.parametrize("args", [ -# dict(max_rc=None, wait_time_s=10), -# dict(max_rc=4, wait_time_s=10), -# dict(max_rc=12, wait_time_s=20) -# ]) -# def test_job_submit_max_rc(ansible_zos_module, args): -# """This""" -# try: -# hosts = ansible_zos_module -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_RC_8) - -# results = hosts.all.zos_job_submit( -# src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] -# ) - -# for result in results.contacted.values(): -# # Should fail normally as a non-zero RC will result in job submit failure -# if args["max_rc"] is None: -# assert result.get("msg") is not None -# assert result.get('changed') is False -# # On busy systems, it is possible that the duration even for a job with a non-zero return code -# # will take considerable time to obtain the job log and thus you could see either error msg below -# #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" -# # - Consider using module zos_job_query to poll for a long running job or -# # increase option \\'wait_times_s` to a value greater than 10.", -# if result.get('duration'): -# duration = result.get('duration') -# else: -# duration = 0 - -# if duration >= args["wait_time_s"]: -# re.search(r'long running job', repr(result.get("msg"))) -# else: -# assert re.search(r'non-zero', repr(result.get("msg"))) - -# # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 -# elif args["max_rc"] == 4: -# assert result.get("msg") is not None -# assert result.get('changed') is False -# # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater -# # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise -# # this job submission has failed. 
-# assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) - -# elif args["max_rc"] == 12: -# # Will not fail but changed will be false for the non-zero RC, there -# # are other possibilities like an ABEND or JCL ERROR will fail this even -# # with a MAX RC -# assert result.get("msg") is None -# assert result.get('changed') is False -# assert result.get("jobs")[0].get("ret_code").get("code") < 12 -# finally: -# hosts.all.file(path=tmp_file.name, state="absent") +@pytest.mark.parametrize("args", [ + dict(max_rc=None, wait_time_s=10), + dict(max_rc=4, wait_time_s=10), + dict(max_rc=12, wait_time_s=20) +]) +def test_job_submit_max_rc(ansible_zos_module, args): + """This""" + try: + hosts = ansible_zos_module + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_RC_8) + results = hosts.all.zos_job_submit( + src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] + ) + + for result in results.contacted.values(): + # Should fail normally as a non-zero RC will result in job submit failure + if args["max_rc"] is None: + assert result.get("msg") is not None + assert result.get('changed') is False + # On busy systems, it is possible that the duration even for a job with a non-zero return code + # will take considerable time to obtain the job log and thus you could see either error msg below + #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" + # - Consider using module zos_job_query to poll for a long running job or + # increase option \\'wait_times_s` to a value greater than 10.", + if result.get('duration'): + duration = result.get('duration') + else: + duration = 0 + + if duration >= args["wait_time_s"]: + re.search(r'long running job', repr(result.get("msg"))) + else: + assert re.search(r'non-zero', repr(result.get("msg"))) + + # Should fail with normally as well, job fails with an 
RC 8 yet max is set to 4 + elif args["max_rc"] == 4: + assert result.get("msg") is not None + assert result.get('changed') is False + # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater + # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise + # this job submission has failed. + assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) + + elif args["max_rc"] == 12: + # Will not fail but changed will be false for the non-zero RC, there + # are other possibilities like an ABEND or JCL ERROR will fail this even + # with a MAX RC + assert result.get("msg") is None + assert result.get('changed') is False + assert result.get("jobs")[0].get("ret_code").get("code") < 12 + finally: + hosts.all.file(path=tmp_file.name, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. 
-""" -# @pytest.mark.template -# @pytest.mark.parametrize("args", [ -# dict( -# template="Default", -# options=dict( -# keep_trailing_newline=False -# ) -# ), -# dict( -# template="Custom", -# options=dict( -# keep_trailing_newline=False, -# variable_start_string="((", -# variable_end_string="))", -# comment_start_string="(#", -# comment_end_string="#)" -# ) -# ), -# dict( -# template="Loop", -# options=dict( -# keep_trailing_newline=False -# ) -# ) -# ]) -# def test_job_submit_jinja_template(ansible_zos_module, args): -# try: -# hosts = ansible_zos_module - -# tmp_file = tempfile.NamedTemporaryFile(delete=False) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_TEMPLATES[args["template"]]) - -# template_vars = dict( -# pgm_name="HELLO", -# input_dataset="DUMMY", -# message="Hello, world", -# steps=[ -# dict(step_name="IN", dd="DUMMY"), -# dict(step_name="PRINT", dd="SYSOUT=*"), -# dict(step_name="UT1", dd="*") -# ] -# ) -# for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): -# host.vars.update(template_vars) - -# results = hosts.all.zos_job_submit( -# src=tmp_file.name, -# location="LOCAL", -# use_template=True, -# template_parameters=args["options"] -# ) - -# for result in results.contacted.values(): -# assert result.get('changed') is True -# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" -# assert result.get("jobs")[0].get("ret_code").get("code") == 0 - -# finally: -# os.remove(tmp_file.name) + +@pytest.mark.template +@pytest.mark.parametrize("args", [ + dict( + template="Default", + options=dict( + keep_trailing_newline=False + ) + ), + dict( + template="Custom", + options=dict( + keep_trailing_newline=False, + variable_start_string="((", + variable_end_string="))", + comment_start_string="(#", + comment_end_string="#)" + ) + ), + dict( + template="Loop", + options=dict( + keep_trailing_newline=False + ) + ) +]) +def test_job_submit_jinja_template(ansible_zos_module, args): + try: + hosts = ansible_zos_module 
+ + tmp_file = tempfile.NamedTemporaryFile(delete=False) + with open(tmp_file.name, "w") as f: + f.write(JCL_TEMPLATES[args["template"]]) + + template_vars = dict( + pgm_name="HELLO", + input_dataset="DUMMY", + message="Hello, world", + steps=[ + dict(step_name="IN", dd="DUMMY"), + dict(step_name="PRINT", dd="SYSOUT=*"), + dict(step_name="UT1", dd="*") + ] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + results = hosts.all.zos_job_submit( + src=tmp_file.name, + location="LOCAL", + use_template=True, + template_parameters=args["options"] + ) + + for result in results.contacted.values(): + assert result.get('changed') is True + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + finally: + os.remove(tmp_file.name) def test_job_submit_full_input(ansible_zos_module): @@ -702,66 +669,46 @@ def test_job_submit_full_input(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_NO_DSN) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." 
-# assert result.get("changed") is False -# assert re.search(r'completion code', repr(result.get("msg"))) -# assert result.get("jobs")[0].get("job_id") is not None - -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# Should have a JCL ERROR <int> -# def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_INVALID_USER) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'return code was not available', repr(result.get("msg"))) -# assert re.search(r'error SEC', repr(result.get("msg"))) -# assert result.get("jobs")[0].get("job_id") is not None -# assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) +def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_NO_DSN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'completion code', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. 
Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'return code was not available', repr(result.get("msg"))) -# assert re.search(r'error ? ?', repr(result.get("msg"))) -# assert result.get("jobs")[0].get("job_id") is not None -# assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" +# Should have a JCL ERROR <int> +def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_INVALID_USER) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." 
+ assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error SEC', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + + +def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error ? ?', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" From 731f39e15604334195b84ce93ca47162214e42a8 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 20 Feb 2024 14:32:04 -0600 Subject: [PATCH 304/413] [v1.10.0][zos_archive] Migrate zos_archive to use ZOAU 1.3 (#1227) * Removed helpers * Updated code to use 1.3 * Removed to_dict * Added changelog * Added changlog * Update zos_unarchive.py * Removed command calls --- .../fragments/1227-migrate-zos_archive.yml | 3 +++ plugins/modules/zos_archive.py | 25 +++++++++---------- 2 files changed, 15 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1227-migrate-zos_archive.yml diff --git a/changelogs/fragments/1227-migrate-zos_archive.yml b/changelogs/fragments/1227-migrate-zos_archive.yml new file mode 100644 index 000000000..820593c95 --- /dev/null +++ b/changelogs/fragments/1227-migrate-zos_archive.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - migrate code to use ZOAU v1.3.0. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1227). diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index f5306bb25..959d263d9 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -424,7 +424,7 @@ mvs_cmd, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) import os import tarfile @@ -433,13 +433,14 @@ import glob import re import math +import traceback from hashlib import sha256 try: from zoautil_py import datasets except Exception: - Datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) XMIT_RECORD_LENGTH = 80 AMATERSE_RECORD_LENGTH = 1024 @@ -789,11 +790,9 @@ def _create_dest_data_set( if tmp_hlq: hlq = tmp_hlq else: - rc, hlq, err = self.module.run_command("hlq") - hlq = hlq.replace('\n', '') - cmd = "mvstmphelper {0}.DZIP".format(hlq) - rc, temp_ds, err = self.module.run_command(cmd) - arguments.update(name=temp_ds.replace('\n', '')) + hlq = datasets.get_hlq() + temp_ds = datasets.tmp_name(high_level_qualifier=hlq) + arguments.update(name=temp_ds) if record_format is None: arguments.update(record_format="FB") @@ -902,8 +901,8 @@ def expand_mvs_paths(self, paths): expanded_path = [] for path in paths: if '*' in path: - e_paths = datasets.listing(path) - e_paths = [path.name for path in e_paths] + # list_dataset_names returns a list of data set names or empty. + e_paths = datasets.list_dataset_names(path) else: e_paths = [path] expanded_path.extend(e_paths) @@ -946,11 +945,11 @@ def compute_dest_size(self): {int} - Destination computed space in kilobytes. 
""" if self.dest_data_set.get("space_primary") is None: - dest_space = 0 + dest_space = 1 for target in self.targets: - data_sets = datasets.listing(target) + data_sets = datasets.list_datasets(target) for ds in data_sets: - dest_space += int(ds.to_dict().get("total_space")) + dest_space += int(ds.total_space) # space unit returned from listings is bytes dest_space = math.ceil(dest_space / 1024) self.dest_data_set.update(space_primary=dest_space, space_type="K") From 066864e09829890f79ebc8d619d5162cc676de15 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 20 Feb 2024 15:26:18 -0600 Subject: [PATCH 305/413] [v1.10.0][zos_find] Removed lineinfile dependency and test with ZOAU 1.3 (#1228) * Removed lineinfile dependency * Added changelog * Updated copyright years * Updated copyright years --- .../1228-zos_find-remove-zos_lineinfile_dep.yml | 3 +++ tests/functional/modules/test_zos_find_func.py | 10 +++++----- 2 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml diff --git a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml new file mode 100644 index 000000000..67642d563 --- /dev/null +++ b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml @@ -0,0 +1,3 @@ +trivial: + - zos_find - Removed zos_lineinfile dependency from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1228). 
diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 50782be0b..3a30d9510 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -63,7 +63,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES] ) for ds in SEQ_NAMES: - hosts.all.zos_lineinfile(src=ds, line=search_string) + hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") find_res = hosts.all.zos_find( patterns=['TEST.FIND.SEQ.*.*'], @@ -91,9 +91,9 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES] ) hosts.all.zos_data_set(name=new_ds, type='seq', state='present') - hosts.all.zos_lineinfile(src=new_ds, line="incorrect string") + hosts.all.shell(cmd=f"decho 'incorrect string' \"{new_ds}\" ") for ds in SEQ_NAMES: - hosts.all.zos_lineinfile(src=ds, line=search_string) + hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") find_res = hosts.all.zos_find( patterns=['TEST.FIND.SEQ.*.*', 'TEST.INVALID.*'], @@ -131,7 +131,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): ] ) for ds in PDS_NAMES: - hosts.all.zos_lineinfile(src=ds + "(MEMBER)", line=search_string) + hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], From 1de0bc1aa35a3bcfe85facf864b698f990930bb2 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 20 Feb 2024 15:49:42 -0600 Subject: [PATCH 306/413] 
[v1.10.0][zos_fetch] Migrate zos_fetch to ZOAU 1.3 (#1229) * Modified code to use ZOAU 1.3 * Updated job_submit call * Add fragment * Added copyright year --- .../fragments/1229-migrate-zos_fetch.yml | 3 ++ plugins/modules/zos_fetch.py | 32 +++++++++---------- .../functional/modules/test_zos_fetch_func.py | 2 +- 3 files changed, 20 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1229-migrate-zos_fetch.yml diff --git a/changelogs/fragments/1229-migrate-zos_fetch.yml b/changelogs/fragments/1229-migrate-zos_fetch.yml new file mode 100644 index 000000000..07f9a26b4 --- /dev/null +++ b/changelogs/fragments/1229-migrate-zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1229). diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index d8b15c0d9..2b32f0760 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -271,7 +271,7 @@ import tempfile import re import os - +import traceback from math import ceil from shutil import rmtree from ansible.module_utils.basic import AnsibleModule @@ -285,16 +285,16 @@ validation, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: - from zoautil_py import datasets, mvscmd, types + from zoautil_py import datasets, mvscmd, ztypes except Exception: - datasets = MissingZOAUImport() - mvscmd = MissingZOAUImport() - types = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + mvscmd = ZOAUImportError(traceback.format_exc()) + ztypes = ZOAUImportError(traceback.format_exc()) class FetchHandler: @@ -373,23 +373,23 @@ def _copy_vsam_to_temp_data_set(self, ds_name): dd_statements = [] dd_statements.append( - types.DDStatement( - name="sysin", definition=types.DatasetDefinition(sysin) + ztypes.DDStatement( + name="sysin", definition=ztypes.DatasetDefinition(sysin) ) ) dd_statements.append( - types.DDStatement( - name="input", definition=types.DatasetDefinition(ds_name) + ztypes.DDStatement( + name="input", definition=ztypes.DatasetDefinition(ds_name) ) ) dd_statements.append( - types.DDStatement( - name="output", definition=types.DatasetDefinition(out_ds_name) + ztypes.DDStatement( + name="output", definition=ztypes.DatasetDefinition(out_ds_name) ) ) dd_statements.append( - types.DDStatement( - name="sysprint", definition=types.FileDefinition(sysprint) + ztypes.DDStatement( + name="sysprint", definition=ztypes.FileDefinition(sysprint) ) ) @@ -591,7 +591,7 @@ def run_module(): src = module.params.get("src") if module.params.get("use_qualifier"): - module.params["src"] = datasets.hlq() + "." + src + module.params["src"] = datasets.get_hlq() + "." 
+ src # ********************************************************** # # Verify paramater validity # diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 357540876..b239bbbd9 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -264,7 +264,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait=True + src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait_time_s=30 ) hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( From d739905aa9d4ce13f34e696a4dd1c3638d45130e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 21 Feb 2024 12:10:10 -0600 Subject: [PATCH 307/413] [v1.10.0][Zos Operator Action Query]Migrate zos operator action query (#1215) * Add timeout x100 * Add error messages for false positives and migrate module * Fix ansible-lint * Fix test case new iteration * Return previous behaviour * Return behaviour * Fix ansible lint * Fix ansible lint * Fix ansible lint * Add fragment * Revert "Add fragment" This reverts commit a434c410cb8746ed65c69eba905137bcc7307708. 
* Add fragment * Fix comments on variable names and comments * Change to timeout_S --- ...1215-Migrate_zos_operator_action_query.yml | 4 ++ plugins/modules/zos_operator_action_query.py | 37 ++++++++++--------- .../test_zos_operator_action_query_func.py | 2 +- 3 files changed, 24 insertions(+), 19 deletions(-) create mode 100644 changelogs/fragments/1215-Migrate_zos_operator_action_query.yml diff --git a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml new file mode 100644 index 000000000..be18056b3 --- /dev/null +++ b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml @@ -0,0 +1,4 @@ +trivial: + - zos_operator_action_query - Update internal functions to account for the change to the + unit of measurement of `timeout` now in centiseconds. + (https://github.com/ansible-collections/ibm_zos_core/pull/1215). \ No newline at end of file diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 022708692..55cd7cd00 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -219,11 +219,12 @@ from ansible.module_utils.basic import AnsibleModule import re +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( @@ -233,7 +234,7 @@ try: from zoautil_py import opercmd except Exception: - opercmd = MissingZOAUImport() + opercmd = ZOAUImportError(traceback.format_exc()) def run_module(): @@ -272,7 +273,7 @@ def run_module(): cmdtxt = "d r,a,s" - cmd_result_a = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + cmd_result_a = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) if cmd_result_a.rc > 0: module.fail_json( @@ -287,7 +288,7 @@ def run_module(): cmdtxt = "d r,a,jn" - cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + cmd_result_b = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) if cmd_result_b.rc > 0: module.fail_json( @@ -395,35 +396,35 @@ def filter_requests(merged_list, params): message_id = params.get("message_id") job_name = params.get("job_name") newlist = merged_list - if system: newlist = handle_conditions(newlist, "system", system) if job_name: newlist = handle_conditions(newlist, "job_name", job_name) if message_id: newlist = handle_conditions(newlist, "message_id", message_id) - return newlist -def handle_conditions(list, condition_type, value): +def handle_conditions(merged_list, condition_type, value): # regex = re.compile(condition_values) newlist = [] - for dict in list: - if value.endswith("*"): - exist = dict.get(condition_type).startswith(value.rstrip("*")) - else: - exist = dict.get(condition_type) == value + exist = False + for message in merged_list: + if message.get(condition_type) is not None: + if value.endswith("*"): + exist = 
message.get(condition_type).startswith(value.rstrip("*")) + else: + exist = message.get(condition_type) == value if exist: - newlist.append(dict) + newlist.append(message) return newlist -def execute_command(operator_cmd, timeout=1, *args, **kwargs): - - # response = opercmd.execute(operator_cmd) - response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) +def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: + timeout_c = 100 * timeout_s + response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) rc = response.rc stdout = response.stdout_response diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index c7afab2f9..950e6900f 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at From 80bbc263923f92a5c719afc96cdb560995466e85 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Sat, 24 Feb 2024 15:07:52 -0600 Subject: [PATCH 308/413] [v1.10.0][zos_unarchive] Migrate zos_unarchive to use ZOAU 1.3 (#1238) * Update calls to datasets API * Update tests * Update zos_job_submit tests that depend on zos_copy * Add changelog fragment * Enable tests that depend on zos_encode * Remove calls to datasets._copy * Fixed pep8 issue * Fixed bug when copying from MVS to USS * Removed helpers * Updated code to use 1.3 * Removed to_dict * Added changelog * Added changlog * Update zos_unarchive.py * Initial changes for zos_unarchive * Removed command calls * Commented test cases * Added changelog * Uncommented test cases * Update zos_unarchive.py * Update test_zos_unarchive_func.py --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- .../fragments/1238-migrate-zos_unarchive.yml | 4 ++++ plugins/modules/zos_unarchive.py | 19 +++++++++---------- .../modules/test_zos_unarchive_func.py | 14 +++++++++----- 3 files changed, 22 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/1238-migrate-zos_unarchive.yml diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml new file mode 100644 index 000000000..6cb8861c9 --- /dev/null +++ b/changelogs/fragments/1238-migrate-zos_unarchive.yml @@ -0,0 +1,4 @@ +trivial: + - zos_archive - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1238). 
+s \ No newline at end of file diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 9ab1409ca..fcbda95e1 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -392,14 +392,15 @@ import os import zipfile import tarfile +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: from zoautil_py import datasets except Exception: - Datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) data_set_regex = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}" @@ -646,8 +647,8 @@ def _compute_dest_data_set_size(self): """ # Get the size from the system - src_attributes = datasets.listing(self.src)[0] - # The size returned by listing is in bytes. + src_attributes = datasets.list_datasets(self.src)[0] + # The size returned by list_datasets is in bytes. 
source_size = int(src_attributes.total_space) if self.format == 'terse': source_size = int(source_size * 1.5) @@ -687,11 +688,9 @@ def _create_dest_data_set( if tmp_hlq: hlq = tmp_hlq else: - rc, hlq, err = self.module.run_command("hlq") - hlq = hlq.replace('\n', '') - cmd = "mvstmphelper {0}.RESTORE".format(hlq) - rc, temp_ds, err = self.module.run_command(cmd) - arguments.update(name=temp_ds.replace('\n', '')) + hlq = datasets.get_hlq() + temp_ds = datasets.tmp_name(high_level_qualifier=hlq) + arguments.update(name=temp_ds) if record_format is None: arguments.update(record_format="FB") if record_length is None: diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index c0b1fe293..28cc0d77d 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -281,6 +281,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, format): finally: hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + @pytest.mark.uss def test_uss_unarchive_copy_to_remote(ansible_zos_module): try: @@ -370,7 +371,6 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec DATASET = get_tmp_ds_name(3) HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( @@ -379,6 +379,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec state="present", record_length=record_length, record_format=record_format, + replace=True ) # Create members if needed if data_set.get("dstype") in ["PDS", "PDSE"]: @@ -386,7 +387,8 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec hosts.all.zos_data_set( name=f"{DATASET}({member})", type="member", - state="present" + state="present", + replace=True ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW @@ -480,7 +482,6 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d DATASET = get_tmp_ds_name(3) HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( @@ -489,6 +490,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d state="present", record_length=record_length, record_format=record_format, + replace=True ) # Create members if needed if data_set.get("dstype") in ["PDS", "PDSE"]: @@ -496,7 +498,8 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d hosts.all.zos_data_set( name=f"{DATASET}({member})", type="member", - state="present" + state="present", + 
replace=True ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW @@ -962,6 +965,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + @pytest.mark.ds @pytest.mark.parametrize( "format", [ From a21b18aba6cc022d211b693323ffbf1d36eda429 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 26 Feb 2024 09:55:32 -0700 Subject: [PATCH 309/413] [v1.10.0] [zos_mount] zos_mount ZOAU v1.3.0 migration (#1237) * Update tests * Add changelog fragment * Re-enabled zos_copy-dependent tests * Update copyright --- .../fragments/1237-migrate-zos_mount.yml | 4 ++++ .../functional/modules/test_zos_mount_func.py | 21 +------------------ 2 files changed, 5 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/1237-migrate-zos_mount.yml diff --git a/changelogs/fragments/1237-migrate-zos_mount.yml b/changelogs/fragments/1237-migrate-zos_mount.yml new file mode 100644 index 000000000..d4787d42d --- /dev/null +++ b/changelogs/fragments/1237-migrate-zos_mount.yml @@ -0,0 +1,4 @@ +trivial: + - tests/functional/modules/test_zos_mount_func.py - migrate code to use + ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1237). 
diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 8883ddebc..1ec7c03f5 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021, 2022 +# Copyright (c) IBM Corporation 2020 - 2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function @@ -9,22 +9,9 @@ import tempfile -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - data_set, -) - -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, -) - from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name -try: - from zoautil_py import Datasets -except Exception: - Datasets = MissingZOAUImport() - INITIAL_PRM_MEMBER = """/* Initial file to look like BPXPRM */ /* some settings at the top */ @@ -79,9 +66,6 @@ def create_sourcefile(hosts, volume): starter, thisfile, str(type(thisfile)) ) ) - # fs_du = data_set.DataSetUtils(thisfile) - # fs_exists = fs_du.exists() - # if fs_exists is False: hosts.all.shell( cmd="zfsadm define -aggregate " @@ -338,9 +322,6 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst assert srcfn in data assert "bpxtablecomment - try this" in data - # fs_du = data_set.DataSetUtils(back_dest_path) - # fs_exists = fs_du.exists() - # assert fs_exists finally: hosts.all.zos_mount( src=srcfn, From 8a6e2b87f911687cd1c570fc787dccc7ded1f8ec Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 26 Feb 2024 08:55:54 -0800 Subject: [PATCH 310/413] Update release notes for V3R1 (#1226) * Update release notes for V3R1 Signed-off-by: ddimatos <dimatos@gmail.com> * Update bug issue template Signed-off-by: ddimatos <dimatos@gmail.com> * 
Update collaboration issue template Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc issue template Signed-off-by: ddimatos <dimatos@gmail.com> * Update enabler template Signed-off-by: ddimatos <dimatos@gmail.com> * Update feature template Signed-off-by: ddimatos <dimatos@gmail.com> * Update module template Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 116 ++++++++-------- .../ISSUE_TEMPLATE/collaboration_issue.yml | 127 +++++++++--------- .github/ISSUE_TEMPLATE/doc_issue.yml | 26 ++-- .github/ISSUE_TEMPLATE/enabler_issue.yml | 22 +-- .../enhancement_feature.issue.yml | 21 +-- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- docs/source/release_notes.rst | 24 ++-- 7 files changed, 177 insertions(+), 161 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index e03266e7b..2193cb615 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,6 +1,6 @@ name: Report a bug description: Request that a bug be reviewed. Complete all required fields. -title: "[Bug] <title> " +title: "[Bug] Enter description" labels: [Bug] assignees: - IBMAnsibleHelper @@ -13,22 +13,60 @@ body: options: - label: There are no existing issues. required: true - - type: checkboxes - id: valid-dependencies + - type: textarea + id: issue-description attributes: - label: Are the dependencies a supported version? - description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + label: Bug description + description: Describe the bug you are experiencing. + placeholder: | + Verbosity is encouraged, the more you share the better for us to understand. + 1. Include the steps to reproduce + 2. Include playbook if applicable + 3. Include screen captures of applicable + 4. 
Include expected vs actual results if applicable + validations: + required: true + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false options: - - label: The dependencies are supported. - required: true + - v1.12.0 + - v1.12.0-beta.1 + - v1.11.0 + - v1.11.0-beta.1 + - v1.10.0 + - v1.10.0-beta.1 + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 (default) + - v1.7.0 + - v1.6.0 + - v1.5.0 + - v1.4.1 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + default: 8 + validations: + required: true - type: dropdown id: zoau-version attributes: label: IBM Z Open Automation Utilities - description: Which version of ZOAU are you using? + description: Which version of ZOAU are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v1.2.5 + - v1.3.4 + - v1.3.3 + - v1.3.2 + - v1.3.1 + - v1.3.0 + - v1.2.5 (default) - v1.2.4 - v1.2.3 - v1.2.2 @@ -36,40 +74,23 @@ body: - v1.2.0 - v1.1.1 - v1.0.3 + default: 5 validations: required: true - type: dropdown id: python-version attributes: label: IBM Enterprise Python - description: Which version of IBM Enterprise Python are you using? + description: Which version of IBM Enterprise Python are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). 
multiple: false options: + - v3.13.x - v3.12.x - - v3.11.x + - v3.11.x (default) - v3.10.x - v3.9.x - v3.8.x - validations: - required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). - multiple: false - options: - - v1.8.0-beta.1 - - v1.7.0 - - v1.7.0-beta.1 - - v1.6.0 - - v1.6.0-beta.1 - - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.2.1 - - v1.1.0 - - v1.0.0 + default: 2 validations: required: true - type: dropdown @@ -79,27 +100,24 @@ body: description: What is the version of Ansible on the controller (`ansible --version`)? multiple: false options: - - latest - - v2.16.x + - v2.17.x + - v2.16.x (default) - v2.15.x - v2.14.x - - v2.13.x - - v2.12.x - - v2.11.x - - v2.9.x + default: 1 validations: required: true - type: dropdown id: zos-version attributes: label: z/OS version - description: What is the version of z/OS on the managed node? + description: What is the version of z/OS on the managed node? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v3.1 - - v2.5 + - v3.1 (unsupported) + - v2.5 (default) - v2.4 - - v2.3 + default: 1 validations: required: false - type: dropdown @@ -110,6 +128,7 @@ body: multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -129,21 +148,10 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: required: false - - type: textarea - id: issue-description - attributes: - label: Bug description - description: Describe the bug you are experiencing. 
- placeholder: | - Verbosity is encouraged, the more you share the better for us to understand. - 1. Include the steps to reproduce - 2. Include playbook if applicable - 3. Include screen captures of applicable - 4. Include expected vs actual results if applicable - validations: - required: true - type: textarea id: issue-output attributes: diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index f601ce1e1..fb8ff3a00 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -23,24 +23,59 @@ body: required: false - label: No, support and service is involved. required: false - - type: checkboxes - id: valid-dependencies + - type: textarea + id: issue-description + attributes: + label: Collaboration description + description: Describe the collaboration issue. + placeholder: | + For example + 1. Working with IBM Enterprise Python to resolve issue xyz. + 2. Working with z/OS application team DFSMS to resolve xyz. + 3. Assisting IBM support to resolve an ibm_zos_copy issue. + validations: + required: true + - type: dropdown + id: collection-version attributes: - label: Are the dependencies a supported? - description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + label: IBM z/OS Ansible core Version + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false options: - - label: Yes, the dependencies are supported. - required: false - - label: Not applicable to this collaboration. 
- required: false + - v1.12.0 + - v1.12.0-beta.1 + - v1.11.0 + - v1.11.0-beta.1 + - v1.10.0 + - v1.10.0-beta.1 + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 (default) + - v1.7.0 + - v1.6.0 + - v1.5.0 + - v1.4.1 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + default: 8 + validations: + required: false - type: dropdown id: zoau-version attributes: label: IBM Z Open Automation Utilities - description: Which version of ZOAU are you using? + description: Which version of ZOAU are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v1.2.5 + - v1.3.4 + - v1.3.3 + - v1.3.2 + - v1.3.1 + - v1.3.0 + - v1.2.5 (default) - v1.2.4 - v1.2.3 - v1.2.2 @@ -48,45 +83,23 @@ body: - v1.2.0 - v1.1.1 - v1.0.3 + default: 5 validations: required: false - type: dropdown id: python-version attributes: label: IBM Enterprise Python - description: Which version of IBM Enterprise Python are you using? - multiple: true + description: Which version of IBM Enterprise Python are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + multiple: false options: - - v3.14.x - v3.13.x - v3.12.x - - v3.11.x + - v3.11.x (default) - v3.10.x - v3.9.x - v3.8.x - validations: - required: false - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). 
- multiple: false - options: - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 - - v1.8.0-beta.1 - - v1.7.0 - - v1.7.0-beta.1 - - v1.6.0 - - v1.6.0-beta.1 - - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.2.1 - - v1.1.0 - - v1.0.0 + default: 2 validations: required: false - type: dropdown @@ -96,27 +109,24 @@ body: description: What is the version of Ansible on the controller (`ansible --version`)? multiple: false options: - - latest - - v2.16.x + - v2.17.x + - v2.16.x (default) - v2.15.x - v2.14.x - - v2.13.x - - v2.12.x - - v2.11.x - - v2.9.x + default: 1 validations: required: false - type: dropdown id: zos-version attributes: label: z/OS version - description: What is the version of z/OS on the managed node? + description: What is the version of z/OS on the managed node? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v3.1 - - v2.5 + - v3.1 (unsupported) + - v2.5 (default) - v2.4 - - v2.3 + default: 1 validations: required: false - type: dropdown @@ -127,6 +137,7 @@ body: multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -146,23 +157,7 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: - required: false - - type: textarea - id: issue-description - attributes: - label: Collaboration description - description: Describe the collaboration issue. - placeholder: | - For example - 1. Working with IBM Enterprise Python to resolve issue xyz. - 2. Working with z/OS application team DFSMS to resolve xyz. - 3. Assisting IBM support to resolve an ibm_zos_copy issue. 
- validations: - required: true - - - - - - + required: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 38a8f1818..dcc6dfda2 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -31,33 +31,39 @@ body: id: collection-version attributes: label: IBM z/OS Ansible core Version - description: Which version of z/OS Ansible core collection are you reporting a documentation bug. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.12.0 + - v1.12.0-beta.1 + - v1.11.0 + - v1.11.0-beta.1 + - v1.10.0 + - v1.10.0-beta.1 - v1.9.0 - v1.9.0-beta.1 - - v1.8.0 - - v1.8.0-beta.1 + - v1.8.0 (default) - v1.7.0 - - v1.7.0-beta.1 - v1.6.0 - - v1.6.0-beta.1 - v1.5.0 - v1.4.1 - v1.3.6 - - v1.2.1 - - v1.1.0 - - v1.0.0 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + default: 8 validations: required: false - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported in this doc issue. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. 
multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -77,5 +83,7 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: required: false diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index d520148dc..c9584acfd 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -15,14 +15,23 @@ body: options: - label: There are no existing issues. required: true + - type: textarea + id: issue-description + attributes: + label: Enabler description + description: Describe the task. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported for this task. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -42,13 +51,8 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: required: false - - type: textarea - id: issue-description - attributes: - label: Enabler description - description: Describe the task, this is the equivalent of a agile story. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. - validations: - required: true + diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index f190ee70c..98adbf65b 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -13,15 +13,23 @@ body: options: - label: There are no existing issues. 
required: true + - type: textarea + id: issue-description + attributes: + label: Enhancement or feature description + description: Describe the enhancement or feature you are requesting. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported in this enhancement or feature. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - - zos_archive - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -42,14 +50,7 @@ body: - zos_script - zos_tso_command - zos_unarchive - validations: - required: true - - type: textarea - id: issue-description - attributes: - label: Enhancement or feature description - description: Describe the enhancement or feature you are requesting. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. + - zos_volume_init validations: required: true diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index a7e7dcfa1..7723b85f1 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -1,6 +1,6 @@ name: Request a new module description: Request a new module be added to the collection. Complete all required fields. 
-title: "[Module] <title> " +title: "[Module] Enter description " labels: [Module] assignees: - IBMAnsibleHelper diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 6770aa879..726c1b64c 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -73,7 +73,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. @@ -159,7 +159,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3. @@ -226,7 +226,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. @@ -289,7 +289,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. 
@@ -405,7 +405,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -444,7 +444,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -594,7 +594,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -695,7 +695,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 - `IBM Open Enterprise SDK for Python`_ v3.9.5 @@ -736,7 +736,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -782,7 +782,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -814,7 +814,7 @@ Availability Reference --------- -* Supported 
by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -937,7 +937,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and From 5f2b4cdae19d062470748c96dce2a9758ca0db53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 26 Feb 2024 20:17:44 -0600 Subject: [PATCH 311/413] [Enabler][1137]Validate_module_zos_job_output_migration_1.3.0 (#1216) * Add manage exception for ZOAU 1.3 * Comment test on dependencies of zos job submit * Adapt test cases to new expections * Adapt test cases to new expections * Adapt test cases to new expections * Adapt test cases to new expections * Adapt test cases to new expections * Add fragment * Validate test_zos_job_output_with_job_submit * Add alias to exception * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add validation * Add validation * Add time and validation * Add time and validation * Add time and validation * Add print * Add print * Add job * Add job * Fix job utils for output * Remove print * Add mesage to fail * Return test cases * Fix Typo * Add job output * Remove print * Move exeptions --- ...lidate_module_zos_job_output_migration.yml | 3 ++ 
plugins/module_utils/job.py | 28 +++++++++---------- plugins/modules/zos_job_output.py | 15 ++++++++++ .../modules/test_zos_job_output_func.py | 21 +++++++------- 4 files changed, 42 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml diff --git a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml new file mode 100644 index 000000000..65d3d3c08 --- /dev/null +++ b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_output - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1216). diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 4a432d764..af96c6ab6 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -318,18 +318,18 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T for single_dd in list_of_dds: dd = {} - if "dataset" not in single_dd: + if "dd_name" not in single_dd: continue # If dd_name not None, only that specific dd_name should be returned if dd_name is not None: - if dd_name not in single_dd["dataset"]: + if dd_name not in single_dd["dd_name"]: continue else: - dd["ddname"] = single_dd["dataset"] + dd["ddname"] = single_dd["dd_name"] - if "recnum" in single_dd: - dd["record_count"] = single_dd["recnum"] + if "records" in single_dd: + dd["record_count"] = single_dd["records"] else: dd["record_count"] = None @@ -338,28 +338,28 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T else: dd["id"] = "?" 
- if "stepname" in single_dd: - dd["stepname"] = single_dd["stepname"] + if "step_name" in single_dd: + dd["stepname"] = single_dd["step_name"] else: dd["stepname"] = None if "procstep" in single_dd: dd["procstep"] = single_dd["procstep"] else: - dd["proctep"] = None + dd["procstep"] = None - if "length" in single_dd: - dd["byte_count"] = single_dd["length"] + if "record_length" in single_dd: + dd["byte_count"] = single_dd["record_length"] else: dd["byte_count"] = 0 tmpcont = None - if "stepname" in single_dd: - if "dataset" in single_dd: + if "step_name" in single_dd: + if "dd_name" in single_dd: tmpcont = jobs.read_output( entry.job_id, - single_dd["stepname"], - single_dd["dataset"] + single_dd["step_name"], + single_dd["dd_name"] ) dd["content"] = tmpcont.split("\n") diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 40c7d61d0..ed5a182d3 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -414,12 +414,20 @@ from ansible.module_utils.basic import AnsibleModule +import traceback +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError, +) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( job_output, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser ) +try: + from zoautil_py import exceptions as zoau_exceptions +except Exception: + zoau_exceptions = ZOAUImportError(traceback.format_exc()) def run_module(): @@ -461,6 +469,13 @@ def run_module(): results = {} results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=ddname) results["changed"] = False + except zoau_exceptions.JobFetchException as fetch_exception: + module.fail_json( + msg="ZOAU exception", + rc=fetch_exception.response.rc, + stdout=fetch_exception.response.stdout_response, + stderr=fetch_exception.response.stderr_response, + ) except Exception as e: module.fail_json(msg=repr(e)) diff 
--git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 830828769..584cd6d6d 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -31,8 +31,6 @@ """ TEMP_PATH = "/tmp/jcl" -JOB_NOT_FOUND_MSG_TXT="The job with the name * could not be found." -JOB_NOT_FOUND_MSG_TXT_ID="The job with the job_id INVALID could not be found." def test_zos_job_output_no_job_id(ansible_zos_module): hosts = ansible_zos_module @@ -47,7 +45,8 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module): results = hosts.all.zos_job_output(job_id="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT_ID + assert result.get("stderr") is not None + assert result.get("failed") is True def test_zos_job_output_no_job_name(ansible_zos_module): @@ -63,7 +62,7 @@ def test_zos_job_output_invalid_job_name(ansible_zos_module): results = hosts.all.zos_job_output(job_name="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get('job_name') == "INVALID" + assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None def test_zos_job_output_no_owner(ansible_zos_module): @@ -71,7 +70,7 @@ def test_zos_job_output_no_owner(ansible_zos_module): results = hosts.all.zos_job_output(owner="") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs") is None + assert result.get("msg") is not None def test_zos_job_output_invalid_owner(ansible_zos_module): @@ -79,7 +78,7 @@ def test_zos_job_output_invalid_owner(ansible_zos_module): results = hosts.all.zos_job_output(owner="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") == 
JOB_NOT_FOUND_MSG_TXT + assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None def test_zos_job_output_reject(ansible_zos_module): @@ -100,10 +99,10 @@ def test_zos_job_output_job_exists(ansible_zos_module): ) jobs = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None ) - for job in jobs.contacted.values(): + print(job) assert job.get("jobs") is not None for job in jobs.contacted.values(): @@ -127,8 +126,8 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) - hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + result = hosts.all.zos_job_submit( + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" @@ -147,4 +146,4 @@ def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") for result in results.contacted.values(): - assert result.get("jobs") is not None + assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None From 73ba2c1174f4e22a4796f9c23072f62ea2ace497 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Tue, 27 Feb 2024 09:46:35 -0800 Subject: [PATCH 312/413] [v1.10.0] [zos_data_set] ZOAU 1.3 migration - zos_data_set (#1242) * remove deprecated 'wait=True' from call to zos_job_submit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add temporary fixes to module_util/data_set.py to catch DatasetVerificationError and strip '-' from parsed volser Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor volser extraction for better readability Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add work-around for potentially errouneous 
DatasetVerificationError when a data set spanning multiple volumes is created Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 issue - remove blank line Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove error catch introduced during debugging Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update error message when DatasetVerificationError is raised Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DatasetCreateError definition to account for instance where no RC exists Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add indent to address pep8 issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../1242-zoau-migration-zos_data_set.yml | 3 ++ plugins/module_utils/data_set.py | 43 ++++++++++++++----- plugins/modules/zos_data_set.py | 2 +- .../modules/test_zos_data_set_func.py | 12 +++--- 4 files changed, 43 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1242-zoau-migration-zos_data_set.yml diff --git a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml new file mode 100644 index 000000000..851783900 --- /dev/null +++ b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml @@ -0,0 +1,3 @@ +trivial: + - zos_data_set - Refactor data_set module_util and functional tests for ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1242). 
\ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 33b1958b4..34346dc12 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -177,13 +177,14 @@ def ensure_present( changed = False if DataSet.data_set_cataloged(name): present = True + if not present: try: DataSet.create(**arguments) except DatasetCreateError as e: raise_error = True # data set exists on volume - if "Error Code: 0x4704" in e.msg: + if "DatasetVerificationError" in e.msg or "Error Code: 0x4704" in e.msg: present, changed = DataSet.attempt_catalog_if_necessary( name, volumes ) @@ -355,6 +356,7 @@ def data_set_cataloged(name, volumes=None): """ name = name.upper() + module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) rc, stdout, stderr = module.run_command( @@ -386,9 +388,14 @@ def data_set_cataloged_volume_list(name): "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin ) delimiter = 'VOLSER------------' - arr = stdout.split(delimiter) - # A volume serial (VOLSER) is not always of fixed length, use ":x.find(' ')" here instead of arr[index]. - volume_list = list(set([x[:x.find(' ')] for x in arr[1:]])) + arr = stdout.split(delimiter)[1:] # throw away header + + # Volume serials (VOLSER) under 6 chars will have one or more leading '-'s due to the chosen delimiter. + # The volser is in between the beginning of each str and the first space. + # Strip away any leading '-'s, then split on the next whitespace and throw away the remaining in each str. 
+ volume_list = [x.strip('-').split()[0] for x in arr] + + volume_list = list(set(volume_list)) # remove duplicates, order doesn't matter return volume_list @staticmethod @@ -1015,12 +1022,21 @@ def create( formatted_args = DataSet._build_zoau_args(**original_args) try: datasets.create(**formatted_args) - except (exceptions.ZOAUException, exceptions.DatasetVerificationError) as create_exception: + except exceptions.ZOAUException as create_exception: raise DatasetCreateError( name, create_exception.response.rc, create_exception.response.stdout_response + create_exception.response.stderr_response ) + except exceptions.DatasetVerificationError as e: + # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 + if len(volumes) > 1: + if DataSet.data_set_cataloged(name, volumes): + return 0 + raise DatasetCreateError( + name, + msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.", + ) # With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned # response.rc now we just return 0 if nothing failed return 0 @@ -1778,12 +1794,19 @@ def __init__(self, data_set, rc): class DatasetCreateError(Exception): - def __init__(self, data_set, rc, msg=""): - self.msg = ( - 'An error occurred during creation of data set "{0}". RC={1}, {2}'.format( - data_set, rc, msg + def __init__(self, data_set, rc=None, msg=""): + if rc: + self.msg = ( + 'An error occurred during creation of data set "{0}". RC={1}, {2}'.format( + data_set, rc, msg + ) + ) + else: + self.msg = ( + 'An error occurred during creation of data set "{0}". 
{1}'.format( + data_set, msg + ) ) - ) super().__init__(self.msg) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 73af4acf1..8b0485826 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index f5568f55e..28882d9ce 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -160,7 +160,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True, wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", location="USS", wait_time_s=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -215,7 +215,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True + src=TEMP_PATH + "/SAMPLE", location="USS" ) # verify data set creation was successful for result in results.contacted.values(): @@ -260,7 +260,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") 
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True + src=TEMP_PATH + "/SAMPLE", location="USS" ) # verify data set creation was successful for result in results.contacted.values(): @@ -308,7 +308,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True + src=TEMP_PATH + "/SAMPLE", location="USS" ) # verify data set creation was successful for result in results.contacted.values(): @@ -345,7 +345,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") # verify data set creation was successful for result in results.contacted.values(): @@ -360,7 +360,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") # verify data set creation was successful for result in results.contacted.values(): From cf123ae5a80938f88469055cb5c9811e7cd0a72f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 29 Feb 2024 11:47:36 -0600 Subject: [PATCH 313/413] Removed trailing char from changelog 
(#1266) --- changelogs/fragments/1238-migrate-zos_unarchive.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml index 6cb8861c9..8afe97d29 100644 --- a/changelogs/fragments/1238-migrate-zos_unarchive.yml +++ b/changelogs/fragments/1238-migrate-zos_unarchive.yml @@ -1,4 +1,3 @@ trivial: - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1238). -s \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/1238). \ No newline at end of file From 267ffa7ce29b56b6a9eaf784c13b418cc032bf02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 29 Feb 2024 13:19:25 -0600 Subject: [PATCH 314/413] [Enabler][Migration]Migrate_zos_blockinfile_and_lineinfile (#1256) * Add change of blockinfile * Check output * Add dataset option * Fix blockinfile * Remove test case with bug * Migrate lineinfile * Comment fail cases * Fix space * Fix documentation * Add fragment * Add correct dataset import * Add gh issue to lineinfile * Add gh issue to test * Remove force * Updated copyright years * Add explanation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...Migrate_zos_blockinfile_and_lineinfile.yml | 4 + plugins/modules/zos_blockinfile.py | 62 +++--- plugins/modules/zos_lineinfile.py | 57 +++--- .../modules/test_zos_blockinfile_func.py | 40 ++-- .../modules/test_zos_lineinfile_func.py | 180 +++++++++--------- 5 files changed, 169 insertions(+), 174 deletions(-) create mode 100644 changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml diff --git a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml new file mode 100644 index 000000000..e2e841e9c --- 
/dev/null +++ b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml @@ -0,0 +1,4 @@ +trivial: + - zos_lineinfile - migrate code to use ZOAU v1.3.0. + - zos_blockinfile - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1256). diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 7a2adf7cc..8fd9701da 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -325,17 +325,18 @@ """ import json +import traceback from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: from zoautil_py import datasets except Exception: - Datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) # supported data set types @@ -379,14 +380,15 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): - BOF - '*regex*' encoding: {str} -- Encoding of the src. - force: {str} -- If not empty passes the -f option to dmod cmd. + force: {bool} -- If not empty passes True option to dmod cmd. Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. 
""" - return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, options=force, as_json=True) + return datasets.blockinfile(src, True, block=block, marker=marker, insert_after=ins_aft, + insert_before=ins_bef, encoding=encoding, force=force, as_json=True) def absent(src, marker, encoding, force): @@ -395,14 +397,14 @@ def absent(src, marker, encoding, force): src: {str} -- The z/OS USS file or data set to modify. marker: {str} -- Identifies the block to be removed. encoding: {str} -- Encoding of the src. - force: {str} -- If not empty passes the -f option to dmod cmd. + force: {bool} -- If not empty passes the value True option to dmod cmd. Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. """ - return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, options=force, as_json=True) + return datasets.blockinfile(src, False, marker=marker, encoding=encoding, force=force, as_json=True) def quotedString(string): @@ -412,12 +414,6 @@ def quotedString(string): return string.replace('"', "") -def quoted_string_output_json(string): - if not isinstance(string, str): - return string - return string.replace('"', "u'") - - def main(): module = AnsibleModule( argument_spec=dict( @@ -540,7 +536,6 @@ def main(): marker_begin = 'BEGIN' if not marker_end: marker_end = 'END' - force = '-f' if force else '' marker = "{0}\\n{1}\\n{2}".format(marker_begin, marker_end, marker) block = transformBlock(block, ' ', indentation) @@ -574,42 +569,31 @@ def main(): # state=present, insert/replace a block with matching regex pattern # state=absent, delete blocks with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, block, quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force) + 
return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force) else: - return_content = absent(src, quotedString(marker), encoding, force) + return_content = absent(src, marker, encoding, force) stdout = return_content.stdout_response stderr = return_content.stderr_response rc = return_content.rc + stdout = stdout.replace('/d', '\\\\d') try: - # change the return string to be loadable by json.loads() - stdout = stdout.replace('/c\\', '/c\\\\') - stdout = stdout.replace('/a\\', '/a\\\\') - stdout = stdout.replace('/i\\', '/i\\\\') - stdout = stdout.replace('$ a\\', '$ a\\\\') - stdout = stdout.replace('1 i\\', '1 i\\\\') - if block: - stdout = stdout.replace(block, quoted_string_output_json(block)) - if ins_aft: - stdout = stdout.replace(ins_aft, quoted_string_output_json(ins_aft)) - if ins_bef: - stdout = stdout.replace(ins_bef, quoted_string_output_json(ins_bef)) # Try to extract information from stdout - ret = json.loads(stdout) - ret['cmd'] = ret['cmd'].replace("u'", '"') - - result['cmd'] = ret['cmd'] - result['changed'] = ret['changed'] - result['found'] = ret['found'] - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' - if len(stderr): - result['stderr'] = str(stderr) - result['rc'] = rc + # The triple double quotes is required for special characters (/_) been scape + ret = json.loads("""{0}""".format(stdout)) except Exception: messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) if result.get('backup_name'): messageDict['backup_name'] = result['backup_name'] module.fail_json(**messageDict) + + result['cmd'] = ret['data']['commands'] + result['changed'] = ret['data']['changed'] + result['found'] = ret['data']['found'] + # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case + # That information will be given with 'changed' and 'found' + if 
len(stderr): + result['stderr'] = str(stderr) + result['rc'] = rc module.exit_json(**result) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 6536509fd..a6576af12 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -275,18 +275,19 @@ sample: /path/to/file.txt.2015-02-03@04:15~ """ import json +import traceback from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: from zoautil_py import datasets except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) # supported data set types @@ -326,8 +327,8 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs src, line, regex=regexp, - ins_aft=ins_aft, - ins_bef=ins_bef, + insert_after=ins_aft, + insert_before=ins_bef, encoding=encoding, first_match=first_match, backref=backrefs, @@ -488,36 +489,36 @@ def main(): stdout = return_content.stdout_response stderr = return_content.stderr_response rc = return_content.rc + stdout = stdout.replace('/c\\', '/c\\\\') + stdout = stdout.replace('/a\\', '/a\\\\') + stdout = stdout.replace('/i\\', '/i\\\\') + stdout = stdout.replace('$ a\\', '$ a\\\\') + stdout = stdout.replace('1 i\\', '1 i\\\\') + stdout = stdout.replace('/d', '\\\\d') + if line: + stdout = stdout.replace(line, quotedString(line)) + if regexp: + stdout = stdout.replace(regexp, quotedString(regexp)) + if 
ins_aft: + stdout = stdout.replace(ins_aft, quotedString(ins_aft)) + if ins_bef: + stdout = stdout.replace(ins_bef, quotedString(ins_bef)) try: - # change the return string to be loadable by json.loads() - stdout = stdout.replace('/c\\', '/c\\\\') - stdout = stdout.replace('/a\\', '/a\\\\') - stdout = stdout.replace('/i\\', '/i\\\\') - stdout = stdout.replace('$ a\\', '$ a\\\\') - stdout = stdout.replace('1 i\\', '1 i\\\\') - if line: - stdout = stdout.replace(line, quotedString(line)) - if regexp: - stdout = stdout.replace(regexp, quotedString(regexp)) - if ins_aft: - stdout = stdout.replace(ins_aft, quotedString(ins_aft)) - if ins_bef: - stdout = stdout.replace(ins_bef, quotedString(ins_bef)) - # Try to extract information from return_content ret = json.loads(stdout) - result['cmd'] = ret['cmd'] - result['changed'] = ret['changed'] - result['found'] = ret['found'] - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' - if len(stderr): - result['stderr'] = str(stderr) - result['rc'] = rc except Exception: messageDict = dict(msg="dsed return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) if result.get('backup_name'): messageDict['backup_name'] = result['backup_name'] module.fail_json(**messageDict) + + result['cmd'] = ret['cmd'] + result['changed'] = ret['changed'] + result['found'] = ret['found'] + # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case + # That information will be given with 'changed' and 'found' + if len(stderr): + result['stderr'] = str(stderr) + result['rc'] = rc module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 39d04639f..197bc9fa3 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1,6 +1,6 @@ # -*- 
coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -481,6 +481,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): params["path"] = full_path results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): + print(result) assert result.get("changed") == 1 results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): @@ -862,24 +863,25 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): finally: remove_uss_environment(ansible_zos_module) - -@pytest.mark.uss -def test_uss_block_insert_with_doublequotes(ansible_zos_module): - hosts = ansible_zos_module - params = dict(insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] - content = TEST_CONTENT_DOUBLEQUOTES - try: - set_uss_environment(ansible_zos_module, content, full_path) - params["path"] = full_path - results = hosts.all.zos_blockinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES - finally: - remove_uss_environment(ansible_zos_module) +# Test case base on bug of dataset.blockifile +# GH Issue #1258 +#@pytest.mark.uss +#def test_uss_block_insert_with_doublequotes(ansible_zos_module): +# hosts = ansible_zos_module +# params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") +# 
full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] +# content = TEST_CONTENT_DOUBLEQUOTES +# try: +# set_uss_environment(ansible_zos_module, content, full_path) +# params["path"] = full_path +# results = hosts.all.zos_blockinfile(**params) +# for result in results.contacted.values(): +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat {0}".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES +# finally: +# remove_uss_environment(ansible_zos_module) @pytest.mark.uss diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 256a21c71..445c0edfe 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -674,93 +674,97 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): finally: remove_ds_environment(ansible_zos_module, ds_name) - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER - finally: - remove_ds_environment(ansible_zos_module, ds_name) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE - finally: - 
remove_ds_environment(ansible_zos_module, ds_name) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH - finally: - remove_ds_environment(ansible_zos_module, ds_name) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH - finally: - remove_ds_environment(ansible_zos_module, ds_name) +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def 
test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) + +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) + +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): +# hosts 
= ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) + +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds From 9d39fb282e58a58b642bb2352478ba1272added5 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 1 Mar 2024 10:45:42 -0600 Subject: [PATCH 315/413] [zos_apf] Standardize ZOAU Imports (#1257) * 
Stadarized ZOAU Imports * Added missing import * Restored backup * Updated changelog * Update zos_apf.py --- changelogs/fragments/1257-zoau-import-zos_apf.yml | 3 +++ plugins/modules/zos_apf.py | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1257-zoau-import-zos_apf.yml diff --git a/changelogs/fragments/1257-zoau-import-zos_apf.yml b/changelogs/fragments/1257-zoau-import-zos_apf.yml new file mode 100644 index 000000000..71b46ba1b --- /dev/null +++ b/changelogs/fragments/1257-zoau-import-zos_apf.yml @@ -0,0 +1,3 @@ +trivial: + - zos_apf - Updated ZOAU imports from the module to capture traceback. + (https://github.com/ansible-collections/ibm_zos_core/pull/1257). diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index bba3beb19..117801306 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -297,13 +297,14 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) +import traceback try: from zoautil_py import zsystem except Exception: - Datasets = MissingZOAUImport() + zsystem = ZOAUImportError(traceback.format_exc()) # supported data set types From c365197b7c99e8e4a8881d53b497608ea74a4270 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 1 Mar 2024 10:49:40 -0600 Subject: [PATCH 316/413] [Enhancement] [Doc scripts] Modifed doc scripts to ensure compatibility between MacOS and GNU sed commands (#1202) * Modifed doc scripts to ensure compatibility between MacOs and GNU sed commands * Added changelog --- changelogs/fragments/1202-doc-gen-script-portability.yml | 4 ++++ docs/scripts/post-zos_apf.sh | 2 +- docs/scripts/pre-template.sh | 6 +++--- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1202-doc-gen-script-portability.yml diff --git a/changelogs/fragments/1202-doc-gen-script-portability.yml b/changelogs/fragments/1202-doc-gen-script-portability.yml new file mode 100644 index 000000000..3c2e6ddbb --- /dev/null +++ b/changelogs/fragments/1202-doc-gen-script-portability.yml @@ -0,0 +1,4 @@ +trivial: + - docs/scripts - Change to sed "-i" in place option which ensures compatibility between MacOS + and GNU versions of sed command. + (https://github.com/ansible-collections/ibm_zos_core/pull/1202). 
diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh index befcaecfe..d7ce5472b 100755 --- a/docs/scripts/post-zos_apf.sh +++ b/docs/scripts/post-zos_apf.sh @@ -28,5 +28,5 @@ SCRIPT_DIR=`dirname "$0"` CURR_PATH=`pwd` # Delete any temporary index RST if [[ -f $CURR_PATH/source/modules/zos_apf.rst ]]; then - sed -i '' "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst + sed -i'' -e "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst fi diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh index ca35775d9..3a2ac16d4 100755 --- a/docs/scripts/pre-template.sh +++ b/docs/scripts/pre-template.sh @@ -27,6 +27,6 @@ template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` cp $template_doc_source $template_doc_source.tmp -sed -i '' "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source -sed -i '' "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source -sed -i '' "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source +sed -i'' -e "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source +sed -i'' -e "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source +sed -i'' -e "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source From f9d53342d46a24267c259f62dc7fa07b74d842cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 4 Mar 2024 10:50:20 -0600 Subject: [PATCH 317/413] [Enabler][1104]migrate_zos_backup_restore (#1265) * Migrated zos_backup_restore to 1.3 * Fixed sanity * Added hlq * Updated backup * Migrate blockinfile * Fix trouble * Fix trash left and new way to test module * Fix variable name * Ensure Diferent HLQ * Add fragment * Change copyright notation * Fix documentation * Get better code and documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1265_Migrate_zos_backup_restore.yml | 
7 +++ plugins/modules/zos_backup_restore.py | 63 +++++++++++++------ .../modules/test_zos_backup_restore.py | 39 +++++++----- 3 files changed, 74 insertions(+), 35 deletions(-) create mode 100644 changelogs/fragments/1265_Migrate_zos_backup_restore.yml diff --git a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml new file mode 100644 index 000000000..9afe4afc3 --- /dev/null +++ b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml @@ -0,0 +1,7 @@ +trivial: + - zos_backup_restore - Refactor zos_backup_restore module and functional tests for ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1265). +minor_changes: + - zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier + (HLQ) for temporary and backup. + (https://github.com/ansible-collections/ibm_zos_core/pull/1265). \ No newline at end of file diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 080c7efab..3185652e1 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -186,6 +186,14 @@ - Defaults to running user's username. type: str required: false + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary and backup + data sets. + - The default HLQ is the Ansible user that executes the module and if + that is not available, then the value of C(TMPHLQ) is used. 
+ required: false + type: str """ RETURN = r"""""" @@ -312,15 +320,16 @@ from re import match, search, IGNORECASE from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from os import path - +import traceback try: - from zoautil_py import datasets, exceptions + from zoautil_py import datasets + from zoautil_py import exceptions as zoau_exceptions except ImportError: - datasets = MissingZOAUImport() - exceptions = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + zoau_exceptions = ZOAUImportError(traceback.format_exc()) def main(): @@ -347,6 +356,7 @@ def main(): sms_storage_class=dict(type="str", required=False), sms_management_class=dict(type="str", required=False), hlq=dict(type="str", required=False), + tmp_hlq=dict(type="str", required=False), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=False) @@ -365,6 +375,7 @@ def main(): sms_storage_class = params.get("sms_storage_class") sms_management_class = params.get("sms_management_class") hlq = params.get("hlq") + tmp_hlq = params.get("tmp_hlq") if operation == "backup": backup( @@ -380,6 +391,7 @@ def main(): space_type=space_type, sms_storage_class=sms_storage_class, sms_management_class=sms_management_class, + tmp_hlq=tmp_hlq, ) else: restore( @@ -396,6 +408,7 @@ def main(): space_type=space_type, sms_storage_class=sms_storage_class, sms_management_class=sms_management_class, + tmp_hlq=tmp_hlq, ) result["changed"] = True @@ -444,6 +457,7 @@ def parse_and_validate_args(params): sms_storage_class=dict(type=sms_type, required=False), sms_management_class=dict(type=sms_type, required=False), hlq=dict(type=hlq_type, default=hlq_default, dependencies=["operation"]), + tmp_hlq=dict(type=hlq_type, required=False), ) parsed_args = BetterArgParser(arg_defs).parse_args(params) @@ -466,6 +480,7 @@ def backup( space_type, sms_storage_class, sms_management_class, + tmp_hlq, ): """Backup data sets or a 
volume to a new data set or unix file. @@ -482,10 +497,11 @@ def backup( space_type (str): The unit of measurement to use when defining data set space. sms_storage_class (str): Specifies the storage class to use. sms_management_class (str): Specifies the management class to use. + tmp_hlq (str): Specifies the tmp hlq to temporary datasets """ args = locals() zoau_args = to_dzip_args(**args) - datasets.zip(**zoau_args) + datasets.dzip(**zoau_args) def restore( @@ -502,6 +518,7 @@ def restore( space_type, sms_storage_class, sms_management_class, + tmp_hlq, ): """[summary] @@ -523,23 +540,26 @@ def restore( space_type (str): The unit of measurement to use when defining data set space. sms_storage_class (str): Specifies the storage class to use. sms_management_class (str): Specifies the management class to use. + tmp_hlq (str): : Specifies the tmp hlq to temporary datasets """ args = locals() zoau_args = to_dunzip_args(**args) - response = datasets._unzip(**zoau_args) + output = "" + try: + rc = datasets.dunzip(**zoau_args) + except zoau_exceptions.ZOAUException as dunzip_exception: + output = dunzip_exception.response.stdout_response + output = output + dunzip_exception.response.stderr_response + rc = get_real_rc(output) failed = False - true_rc = response.rc - if response.rc > 0: - output = response.stdout_response + response.stderr_response - true_rc = get_real_rc(output) or true_rc - if true_rc > 0 and true_rc <= 4: + if rc > 0 and rc <= 4: if recover is not True: failed = True - elif true_rc > 0: + elif rc > 4: failed = True if failed: - raise exceptions.ZOAUException( - "%s,RC=%s" % (response.stderr_response, response.rc) + raise zoau_exceptions.ZOAUException( + "{0}, RC={1}".format(output, rc) ) @@ -631,7 +651,7 @@ def hlq_default(contents, dependencies): """ hlq = None if dependencies.get("operation") == "restore": - hlq = datasets.hlq() + hlq = datasets.get_hlq() return hlq @@ -791,6 +811,10 @@ def to_dzip_args(**kwargs): if kwargs.get("space_type"): size += 
kwargs.get("space_type") zoau_args["size"] = size + + if kwargs.get("tmp_hlq"): + zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) + return zoau_args @@ -844,7 +868,10 @@ def to_dunzip_args(**kwargs): zoau_args["size"] = size if kwargs.get("hlq"): - zoau_args["hlq"] = kwargs.get("hlq") + zoau_args["high_level_qualifier"] = kwargs.get("hlq") + + if kwargs.get("tmp_hlq"): + zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) return zoau_args diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 1b44ec124..a35750b63 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -26,7 +26,7 @@ DATA_SET_QUALIFIER2 = "{0}.PRIVATE.TESTDS2" DATA_SET_BACKUP_LOCATION = "MY.BACKUP" UNIX_BACKUP_LOCATION = "/tmp/mybackup.dzp" -NEW_HLQ = "NEWHLQ" +NEW_HLQ = "TMPHLQ" DATA_SET_RESTORE_LOCATION = DATA_SET_QUALIFIER.format(NEW_HLQ) DATA_SET_RESTORE_LOCATION2 = DATA_SET_QUALIFIER2.format(NEW_HLQ) @@ -73,6 +73,10 @@ def delete_data_set(hosts, data_set_name): def delete_file(hosts, path): hosts.all.file(path=path, state="absent") +def delete_remnants(hosts): + hosts.all.shell(cmd="drm 'ANSIBLE.*'") + hosts.all.shell(cmd="drm 'TEST.*'") + hosts.all.shell(cmd="drm 'TMPHLQ.*'") def get_unused_volume_serial(hosts): found = False @@ -87,7 +91,6 @@ def is_volume(hosts, volume): results = hosts.all.shell(cmd="vtocls ${volume}") failed = False for result in results.contacted.values(): - print(result) if result.get("failed", False) is True: failed = True if result.get("rc", 0) > 0: @@ -130,7 +133,6 @@ def assert_data_set_or_file_does_not_exist(hosts, name): def 
assert_data_set_exists(hosts, data_set_name): results = hosts.all.shell("dls '{0}'".format(data_set_name.upper())) for result in results.contacted.values(): - print(result) found = search( "^{0}$".format(data_set_name), result.get("stdout"), IGNORECASE | MULTILINE ) @@ -213,6 +215,7 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -249,6 +252,7 @@ def test_backup_of_data_set_when_backup_dest_exists( finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -269,6 +273,7 @@ def test_backup_and_restore_of_data_set( ): hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + new_hlq = NEW_HLQ try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -282,19 +287,21 @@ def test_backup_and_restore_of_data_set( overwrite=overwrite, recover=recover, ) + if not overwrite: + new_hlq = "TEST" assert_module_did_not_fail(results) assert_data_set_or_file_exists(hosts, backup_name) results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, - hlq=NEW_HLQ, + hlq=new_hlq, overwrite=overwrite, ) assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -348,8 +355,8 @@ def test_backup_and_restore_of_data_set_various_space_measurements( assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -397,8 +404,8 @@ def 
test_backup_and_restore_of_data_set_when_restore_location_exists( assert_module_failed(results) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): @@ -428,15 +435,13 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): backup_name=DATA_SET_BACKUP_LOCATION, overwrite=True, recover=True, - hlq=NEW_HLQ, ) assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): @@ -473,9 +478,8 @@ def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): @@ -485,7 +489,6 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( @@ -514,9 +517,9 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) 
delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -545,7 +548,7 @@ def test_restore_of_data_set_when_backup_does_not_exist( finally: delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) - + delete_remnants(hosts) @pytest.mark.parametrize( "backup_name", @@ -574,7 +577,7 @@ def test_backup_of_data_set_when_data_set_does_not_exist( finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) - + delete_remnants(hosts) def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module @@ -597,6 +600,7 @@ def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): @@ -629,6 +633,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) # def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module): From ba43c842d15272683c70a336f9bc93ff13c215d0 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Mon, 4 Mar 2024 15:49:47 -0800 Subject: [PATCH 318/413] [1.10.0] [zos_data_set] Bugfix/1268/quick fix len of volumes work around (#1270) * add None check for volumes in create function Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update changelog fragment name Signed-off-by: Ketan 
Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../fragments/1270-quick-fix-len-of-volumes-work-around.yml | 5 +++++ plugins/module_utils/data_set.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml diff --git a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml new file mode 100644 index 000000000..1f6ba201d --- /dev/null +++ b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml @@ -0,0 +1,5 @@ +trivial: + - module_utils/data_set.py - len(volme) was always called on receiving + DatasetVerificationError from Dataset.create() even though volumes=None was + a valid possible outcome. The fix adds a null check to the conditional. + (https://github.com/ansible-collections/ibm_zos_core/pull/1270). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 34346dc12..613bc9973 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1030,7 +1030,7 @@ def create( ) except exceptions.DatasetVerificationError as e: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 - if len(volumes) > 1: + if volumes and len(volumes) > 1: if DataSet.data_set_cataloged(name, volumes): return 0 raise DatasetCreateError( From d3e14f3717d453b1749462e513d9174e4c452339 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 6 Mar 2024 16:22:30 -0700 Subject: [PATCH 319/413] [v1.10.0] [zos_job_submit] Handling of non-UTF8 chars in job output (#1261) * Added test to validate handling of non-UTF8 chars * Add changelog fragment * Clean up new test * Add try-except block when reading a job's output * Remove commented code * Update changelog fragment * Change job queried in test --- .../1261-job-submit-non-utf8-chars.yml | 9 
++ plugins/module_utils/job.py | 27 +++--- .../modules/test_zos_job_query_func.py | 2 +- .../modules/test_zos_job_submit_func.py | 85 ++++++++++++++++++- 4 files changed, 109 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1261-job-submit-non-utf8-chars.yml diff --git a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml new file mode 100644 index 000000000..7f322afe4 --- /dev/null +++ b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml @@ -0,0 +1,9 @@ +bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. + (https://github.com/ansible-collections/ibm_zos_core/pull/1261). +trivial: + - zos_job_submit - add test case to validate a bugfix in ZOAU v1.3.0 that + handles non-UTF8 characters correctly in a job's output. + (https://github.com/ansible-collections/ibm_zos_core/pull/1261). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index af96c6ab6..1afdaed55 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -356,11 +356,21 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "step_name" in single_dd: if "dd_name" in single_dd: - tmpcont = jobs.read_output( - entry.job_id, - single_dd["step_name"], - single_dd["dd_name"] - ) + # In case ZOAU fails when reading the job output, we'll + # add a message to the user telling them of this. + # ZOAU cannot read partial output from a job, so we + # have to make do with nothing from this step if it fails. + try: + tmpcont = jobs.read_output( + entry.job_id, + single_dd["step_name"], + single_dd["dd_name"] + ) + except UnicodeDecodeError: + tmpcont = ( + "Non-printable UTF-8 characters were present in this output. " + "Please access it manually." 
+ ) dd["content"] = tmpcont.split("\n") job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) @@ -393,13 +403,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["ret_code"]["msg_code"] = None job["ret_code"]["code"] = None - # if len(list_of_dds) > 0: - # The duration should really only be returned for job submit but the code - # is used job_output as well, for now we can ignore this point unless - # we want to offer a wait_time_s for job output which might be reasonable. - # Note: Moved this to the upper time loop, so it should always be populated. - # job["duration"] = duration - final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 8c1f170ed..ee7b03157 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -114,7 +114,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): def test_zos_job_id_query_short_ids_func(ansible_zos_module): hosts = ansible_zos_module - qresults = hosts.all.zos_job_query(job_id="STC003") + qresults = hosts.all.zos_job_query(job_id="STC00002") for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 9de3e992a..0694cdfa0 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -255,10 +255,56 @@ // """ -JCL_FULL_INPUT="""//HLQ0 JOB MSGLEVEL=(1,1), +JCL_FULL_INPUT = """//HLQ0 JOB MSGLEVEL=(1,1), // MSGCLASS=A,CLASS=A,NOTIFY=&SYSUID //STEP1 EXEC PGM=BPXBATCH,PARM='PGM /bin/sleep 5'""" +C_SRC_INVALID_UTF8 = """#include <stdio.h> +int main() +{ + unsigned char a=0x64; + unsigned char b=0x2A; + unsigned char 
c=0xB8; + unsigned char d=0xFF; + unsigned char e=0x81; + unsigned char f=0x82; + unsigned char g=0x83; + unsigned char h=0x00; + printf("Value of a: Hex: %X, character: %c",a,a); + printf("Value of b: Hex: %X, character: %c",b,b); + printf("Value of c: Hex: %X, character: %c",c,c); + printf("Value of d: Hex: %X, character: %c",d,d); + printf("Value of a: Hex: %X, character: %c",e,e); + printf("Value of b: Hex: %X, character: %c",f,f); + printf("Value of c: Hex: %X, character: %c",g,g); + printf("Value of d: Hex: %X, character: %c",h,h); + return 0; +} +""" + +JCL_INVALID_UTF8_CHARS_EXC = """//* +//****************************************************************************** +//* Job that runs a C program that returns characters outside of the UTF-8 range +//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 onwards +//* that deals properly with these chars. +//* The JCL needs to be formatted to give it the directory where the C program +//* is located. +//****************************************************************************** +//NOEBCDIC JOB (T043JM,JM00,1,0,0,0),'NOEBCDIC - JRM', +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=&SYSUID +//NOPRINT EXEC PGM=BPXBATCH +//STDPARM DD * +SH ( +cd {0}; +./noprint; +exit 0; +) +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +// +""" + TEMP_PATH = "/tmp/jcl" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -712,3 +758,40 @@ def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): assert re.search(r'error ? ?', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" 
+ + +# This test case is related to the following GitHub issues: +# - https://github.com/ansible-collections/ibm_zos_core/issues/677 +# - https://github.com/ansible-collections/ibm_zos_core/issues/972 +# - https://github.com/ansible-collections/ibm_zos_core/issues/1160 +# - https://github.com/ansible-collections/ibm_zos_core/issues/1255 +def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): + try: + hosts = ansible_zos_module + + # Copy C source and compile it. + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/noprint.c".format(quote(C_SRC_INVALID_UTF8), TEMP_PATH) + ) + hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c") + + # Create local JCL and submit it. + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_INVALID_UTF8_CHARS_EXC.format(TEMP_PATH)) + + results = hosts.all.zos_job_submit( + src=tmp_file.name, + location="LOCAL", + wait_time_s=15 + ) + + for result in results.contacted.values(): + # We shouldn't get an error now that ZOAU handles invalid/unprintable + # UTF-8 chars correctly. 
+ assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + finally: + hosts.all.file(path=TEMP_PATH, state="absent") From 9799ab1ac452acd548f18fa158fa54752648b77a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 8 Mar 2024 10:11:07 -0600 Subject: [PATCH 320/413] [v1.10.0] [Documentation] Replaced path to src in zos_archive and zos_unarchive documentation (#1286) * Replaced path to src in zos_archive and zos_unarchive documentation * Added changelog --- .../1286-update-zos_archive-zos_unarchive-docs.yml | 5 +++++ plugins/modules/zos_archive.py | 10 +++++----- plugins/modules/zos_unarchive.py | 14 +++++++------- 3 files changed, 17 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml diff --git a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml new file mode 100644 index 000000000..ef213b06f --- /dev/null +++ b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml @@ -0,0 +1,5 @@ +trivial: + - zos_archive - Updated examples to use path instead of src. + (https://github.com/ansible-collections/ibm_zos_core/pull/1286). + - zos_unarchive - Updated examples and return dict to use path instead of src. + (https://github.com/ansible-collections/ibm_zos_core/pull/1286). 
\ No newline at end of file diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 959d263d9..951b6bc87 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -325,7 +325,7 @@ # Simple archive - name: Archive file into a tar zos_archive: - path: /tmp/archive/foo.txt + src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: name: tar @@ -333,7 +333,7 @@ # Archive multiple files - name: Compress list of files into a zip zos_archive: - path: + src: - /tmp/archive/foo.txt - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip @@ -343,7 +343,7 @@ # Archive one data set into terse - name: Compress data set into a terse zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -351,7 +351,7 @@ # Use terse with different options - name: Compress data set into a terse, specify pack algorithm and use adrdssu zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -362,7 +362,7 @@ # Use a pattern to store - name: Compress data set pattern using xmit zos_archive: - path: "USER.ARCHIVE.*" + src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index fcbda95e1..e9b17766c 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -323,14 +323,14 @@ # Simple extract - name: Copy local tar file and unpack it on the managed z/OS node. zos_unarchive: - path: "./files/archive_folder_test.tar" + src: "./files/archive_folder_test.tar" format: name: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. 
zos_unarchive: - path: "/tmp/test.bz2" + src: "/tmp/test.bz2" format: name: bz2 include: @@ -339,7 +339,7 @@ # Use exclude - name: Unarchive a terse data set and excluding data sets from unpacking. zos_unarchive: - path: "USER.ARCHIVE.RESULT.TRS" + src: "USER.ARCHIVE.RESULT.TRS" format: name: terse exclude: @@ -349,7 +349,7 @@ # List option - name: List content from XMIT zos_unarchive: - path: "USER.ARCHIVE.RESULT.XMIT" + src: "USER.ARCHIVE.RESULT.XMIT" format: name: xmit format_options: @@ -358,14 +358,14 @@ ''' RETURN = r''' -path: +src: description: - File path or data set name unarchived. + File path or data set name unpacked. type: str returned: always dest_path: description: - - Destination path where archive was extracted. + - Destination path where archive was unpacked. type: str returned: always targets: From 068a1a521e00ae8079f4ebe67cff8d510b28580b Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 11 Mar 2024 13:34:51 -0700 Subject: [PATCH 321/413] [v1.10.0-beta.1][port forward] Documentation to update zos_ping about the deprecated scp in OpenSSH 9 or later. 
(#1295) * Update zos_ping to note OpenSSH deprecation of SCP Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragement after cherry-pick Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/1295-doc-zos_ping-scp.yml | 7 +++++++ docs/source/modules/zos_ping.rst | 12 ++++++++++++ plugins/modules/zos_ping.py | 12 +++++++++++- plugins/modules/zos_ping.rexx | 4 ++-- 4 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1295-doc-zos_ping-scp.yml diff --git a/changelogs/fragments/1295-doc-zos_ping-scp.yml b/changelogs/fragments/1295-doc-zos_ping-scp.yml new file mode 100644 index 000000000..a9477150d --- /dev/null +++ b/changelogs/fragments/1295-doc-zos_ping-scp.yml @@ -0,0 +1,7 @@ +trivial: + - zos_ping - Update zos_ping documentation to instruct users how + to fall back to legacy SCP when using OpenSSH 9.0 or later. + (https://github.com/ansible-collections/ibm_zos_core/pull/1295). + - zos_ping - Update zos_ping REXX source to check for python + version 3.10 or later. + (https://github.com/ansible-collections/ibm_zos_core/pull/1295). \ No newline at end of file diff --git a/docs/source/modules/zos_ping.rst b/docs/source/modules/zos_ping.rst index a9a959dfe..a4405b473 100644 --- a/docs/source/modules/zos_ping.rst +++ b/docs/source/modules/zos_ping.rst @@ -40,10 +40,22 @@ Examples +Notes +----- +.. note:: + This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. 
If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry ``scp_extra_args="-O"`` into the ini file named ``ansible.cfg``. +See Also +-------- + +.. seealso:: + + - :ref:`ansible.builtin.ssh_module` + + Return Values diff --git a/plugins/modules/zos_ping.py b/plugins/modules/zos_ping.py index eb44740e8..6de0cccf0 100644 --- a/plugins/modules/zos_ping.py +++ b/plugins/modules/zos_ping.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -31,6 +31,16 @@ - "Blake Becker (@blakeinate)" - "Demetrios Dimatos (@ddimatos)" options: {} +notes: + - This module is written in REXX and relies on the SCP protocol to transfer the source to + the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. + Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers + are no longer treated as text and are transferred as binary preserving the source files + encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, + you can instruct SSH to use SCP by adding the entry C(scp_extra_args="-O") into the ini + file named C(ansible.cfg). +seealso: +- module: ansible.builtin.ssh """ EXAMPLES = r""" diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index a4fd53340..a881146b0 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -62,7 +62,7 @@ Parse Arg argFile . 
pythonName = 'Python' majVersionPython = 3 -minVersionPython = 8 +minVersionPython = 10 warningJsonList = '' If (argFile = '') Then Do @@ -85,7 +85,7 @@ If (rc <> 0 | returnCode <> HWTJ_OK) Then Do failModule(errmsg, "", retC) End -/* Check for Python version >= 3.8 eg: 'Python 3.8.2' */ +/* Check for Python version >= 3.8 eg: 'Python 3.10.0' */ retC = bpxwunix('python3 --version', out., err.) If (err.0 > 0) Then Do Do index=1 To err.0 From 02b49be1ff50aa054bd86c5c47d1eca0dfd09e7e Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 11 Mar 2024 13:35:53 -0700 Subject: [PATCH 322/413] [v1.10.0-beta.1][port forward] Add chained command example to zos_tso_command (#1293) * Add chained command example with folding scalar and chomp Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright year Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Updated zos_tso_command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Updted changelog fragment PR Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../1292-doc-zos_tso_command-example.yml | 4 +++ docs/source/modules/zos_tso_command.rst | 33 ++++++++++------- plugins/modules/zos_tso_command.py | 36 +++++++++++-------- 3 files changed, 46 insertions(+), 27 deletions(-) create mode 100644 changelogs/fragments/1292-doc-zos_tso_command-example.yml diff --git a/changelogs/fragments/1292-doc-zos_tso_command-example.yml b/changelogs/fragments/1292-doc-zos_tso_command-example.yml new file mode 100644 index 000000000..6ed868be7 --- /dev/null +++ b/changelogs/fragments/1292-doc-zos_tso_command-example.yml @@ -0,0 +1,4 @@ +trivial: + - zos_tso_command - Added an example on how to chain multiple TSO commands such + that they are invoked together when dependent on each other. + (https://github.com/ansible-collections/ibm_zos_core/pull/1293). 
\ No newline at end of file diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index 816a859e7..f3cdb0254 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -55,27 +55,34 @@ Examples .. code-block:: yaml+jinja - - name: Execute TSO commands to allocate a new dataset + - name: Execute TSO commands to allocate a new dataset. zos_tso_command: - commands: - - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') - - delete 'TEST.HILL3.TEST' + commands: + - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') + - delete 'TEST.HILL3.TEST' - - name: Execute TSO command list user TESTUSER to obtain TSO information + - name: Execute TSO command List User (LU) for TESTUSER to obtain TSO information. zos_tso_command: - commands: - - LU TESTUSER + commands: + - LU TESTUSER - - name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) + - name: Execute TSO command List Dataset (LISTDSD) and allow for maximum return code of 4. zos_tso_command: - commands: - - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC - max_rc: 4 + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 - name: Execute TSO command to run explicitly a REXX script from a data set. zos_tso_command: - commands: - - EXEC HLQ.DATASET.REXX exec + commands: + - EXEC HLQ.DATASET.REXX exec + + - name: Chain multiple TSO commands into one invocation using semicolons. 
+ zos_tso_command: + commands: >- + ALLOCATE DDNAME(IN1) DSNAME('HLQ.PDSE.DATA.SRC(INPUT)') SHR; + ALLOCATE DDNAME(OUT1) DSNAME('HLQ.PDSE.DATA.DEST(OUTPUT)') SHR; + OCOPY INDD(IN1) OUTDD(OUT1) BINARY; diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 28b033a90..6c2cb6ef6 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -26,6 +26,7 @@ author: - "Xiao Yuan Ma (@bjmaxy)" - "Rich Parker (@richp405)" + - "Demetrios Dimatos (@ddimatos)" options: commands: description: @@ -94,27 +95,34 @@ """ EXAMPLES = r""" -- name: Execute TSO commands to allocate a new dataset +- name: Execute TSO commands to allocate a new dataset. zos_tso_command: - commands: - - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') - - delete 'TEST.HILL3.TEST' + commands: + - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') + - delete 'TEST.HILL3.TEST' -- name: Execute TSO command list user TESTUSER to obtain TSO information +- name: Execute TSO command List User (LU) for TESTUSER to obtain TSO information. zos_tso_command: - commands: - - LU TESTUSER + commands: + - LU TESTUSER -- name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) +- name: Execute TSO command List Dataset (LISTDSD) and allow for maximum return code of 4. zos_tso_command: - commands: - - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC - max_rc: 4 + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 - name: Execute TSO command to run a REXX script explicitly from a data set. 
zos_tso_command: - commands: - - EXEC HLQ.DATASET.REXX exec + commands: + - EXEC HLQ.DATASET.REXX exec + +- name: Chain multiple TSO commands into one invocation using semicolons. + zos_tso_command: + commands: >- + ALLOCATE DDNAME(IN1) DSNAME('HLQ.PDSE.DATA.SRC(INPUT)') SHR; + ALLOCATE DDNAME(OUT1) DSNAME('HLQ.PDSE.DATA.DEST(OUTPUT)') SHR; + OCOPY INDD(IN1) OUTDD(OUT1) BINARY; """ from ansible.module_utils.basic import AnsibleModule From 1c8259210b0b200e2dceaac2c3d1aac1ffff963a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 13 Mar 2024 10:15:15 -0600 Subject: [PATCH 323/413] [Enabler][995 996]Remove_local_charset_from_zos_fetch (#1298) * First iteration * Remove latest sanity cases * Fix sanity * Remove to last local_charset left * Add fragment * Add comment of explanation * Change changelog --- .../1298-Remove_local_charset_from_zos_fetch.yml | 3 +++ plugins/action/zos_fetch.py | 14 +++++++++----- plugins/modules/zos_fetch.py | 10 ++++++---- tests/sanity/ignore-2.14.txt | 2 -- tests/sanity/ignore-2.15.txt | 2 -- tests/sanity/ignore-2.16.txt | 2 -- 6 files changed, 18 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml diff --git a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml new file mode 100644 index 000000000..ca1ea840e --- /dev/null +++ b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove argument not documented. + (https://github.com/ansible-collections/ibm_zos_core/pull/1298). 
\ No newline at end of file diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 087c70953..611922bf3 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -107,7 +107,7 @@ def run(self, tmp=None, task_vars=None): src = self._task.args.get('src') dest = self._task.args.get('dest') - encoding = self._task.args.get('encoding') + encoding = self._task.args.get('encoding', None) flat = _process_boolean(self._task.args.get('flat'), default=False) is_binary = _process_boolean(self._task.args.get('is_binary')) ignore_sftp_stderr = _process_boolean( @@ -219,9 +219,13 @@ def run(self, tmp=None, task_vars=None): # Execute module on remote host # # ********************************************************** # new_module_args = self._task.args.copy() - new_module_args.update( - dict(local_charset=encode.Defaults.get_default_system_charset()) - ) + encoding_to = None + if encoding: + encoding_to = encoding.get("to", None) + if encoding is None or encoding_to is None: + new_module_args.update( + dict(encoding=dict(to=encode.Defaults.get_default_system_charset())) + ) remote_path = None try: fetch_res = self._execute_module( diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 2b32f0760..dc4bc8071 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -584,7 +584,6 @@ def run_module(): validate_checksum=dict(required=False, default=True, type="bool"), encoding=dict(required=False, type="dict"), ignore_sftp_stderr=dict(type="bool", default=False, required=False), - local_charset=dict(type="str"), tmp_hlq=dict(required=False, type="str", default=None), ) ) @@ -606,7 +605,7 @@ def run_module(): 
tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), ) - if not module.params.get("encoding") and not module.params.get("is_binary"): + if not module.params.get("encoding").get("from") and not module.params.get("is_binary"): mvs_src = data_set.is_data_set(src) remote_charset = encode.Defaults.get_default_system_charset() @@ -614,10 +613,13 @@ def run_module(): "from": encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET if mvs_src else remote_charset, - "to": module.params.get("local_charset"), + "to": module.params.get("encoding").get("to"), } - if module.params.get("encoding"): + # We check encoding 'from' and 'to' because if the user pass both arguments of encoding, + # we honor those but encoding 'to' is an argument that the code obtain any time. + # Encoding will not be null and will generate problems as encoding 'from' could came empty. + if module.params.get("encoding").get("from") and module.params.get("encoding").get("to"): module.params.update( dict( from_encoding=module.params.get("encoding").get("from"), diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 89cf4db51..55477a2d0 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -10,8 +10,6 @@ plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # License plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 89cf4db51..55477a2d0 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -10,8 +10,6 @@ plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # License plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 89cf4db51..55477a2d0 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -10,8 +10,6 @@ plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # License plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ce2551abd7724701e557be0af583f2de92ff5047 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 13 Mar 2024 13:48:20 -0400 Subject: [PATCH 324/413] [v1.10.0] [Enabler] [zos_data_set] Enabler/992/fixsanityfourin110 (#1285) * catch up changes for core 1.10 changelog addition zos_data set to correct the choices value in type, space_type test module changed to request only UPPER types * record_format values, plus aliases (data_class, format, size) also capitalized examples and changed case-insensitive to sensitive in docs * added result output to test to see what failed in creation data_set: missed a comma in a choices entry * indentation error in documentation * arg name had a choice parameter (paste-o) submit call in a test had a 'wait' parameter * confusion with sms_storage_class aliase data_class versus sms_data_class with no alias * commented out catalog before delete actions in testing that pair (catalog, then absent) seems to consistently fail * pulled old print_results, moved to present when cataloged final test * changing exception handling in data_set which was accessing non-existing members * re-enabled pre-catalog, moved print_resp to line 326 * re-printing @166 on creation * change 'space_type' to default=M, so code matches docs * added default 
record_format to 'FB' * forcing record_format to FB if none * adding printresult do create when absent for details * needed to eliminate length check on 'volumes' which can legit be a nonetype * corrected output header in test, added secondary default to zos_data_set/record_type * expanded results @ 416 to show both creation run results. * testing changing vsam (esds) record to F record format * cleaned inline comment, removed esds from data set creation list * re-elaborating output on cat/uncat/recat testing * removed rrds as well as esds types * updated test for 413, and old_aliases to use 2 vars removed default=F for vsam type * re-enabled complext ds types, and added logging enabler to test system * expanded exception on dataset creation to show calling params * printing ensure present 253 * dumping formatted params in exception handler * changing record format enforcement * forcing the blank record_type for vsam dataset types * expanded settings replacement to after arg parser removed vvv printouts and logging import * added redundant value check, so mutually exclusive values are checked before and after arg parsing. * allow record size in batch, because it will get cleared out before use * changing 'size' from an alias to an optional parameter, because the type changed * corrected missing commas * added print to create/delete in batch * removed 'size' parameter, and removed 'old args' test and repair routines. * added print back into 184 to make sure we tripped on a junk file * correcting double-creation of data set final tests * correction of result->results for a test loop. * removed extra output from test * corrected sanity fragment to include PR# removed extra/debug output from data_set * changed exception handler in data_set to match new exception class. 
--- changelogs/fragments/992-fix-sanity4to6.yml | 7 + plugins/module_utils/data_set.py | 4 +- plugins/modules/zos_data_set.py | 176 ++++++++++++------ .../modules/test_zos_data_set_func.py | 97 +++++----- tests/sanity/ignore-2.10.txt | 2 - tests/sanity/ignore-2.11.txt | 3 - tests/sanity/ignore-2.12.txt | 3 - tests/sanity/ignore-2.13.txt | 3 - tests/sanity/ignore-2.14.txt | 3 - tests/sanity/ignore-2.15.txt | 3 - tests/sanity/ignore-2.16.txt | 3 - tests/sanity/ignore-2.9.txt | 3 - 12 files changed, 171 insertions(+), 136 deletions(-) create mode 100644 changelogs/fragments/992-fix-sanity4to6.yml diff --git a/changelogs/fragments/992-fix-sanity4to6.yml b/changelogs/fragments/992-fix-sanity4to6.yml new file mode 100644 index 000000000..3d9637c63 --- /dev/null +++ b/changelogs/fragments/992-fix-sanity4to6.yml @@ -0,0 +1,7 @@ +trivial: + - zos_data_set.py - Corrected references to input variable definitions + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). + - data_set.py - Updated exception handler to match what was returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). + - test_zos_data_set_func.py - Removed test of discontinued function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). 
diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 613bc9973..3bd502858 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1022,11 +1022,11 @@ def create( formatted_args = DataSet._build_zoau_args(**original_args) try: datasets.create(**formatted_args) - except exceptions.ZOAUException as create_exception: + except exceptions._ZOAUExtendableException as create_exception: raise DatasetCreateError( name, create_exception.response.rc, - create_exception.response.stdout_response + create_exception.response.stderr_response + create_exception.response.stdout_response + "\n" + create_exception.response.stderr_response ) except exceptions.DatasetVerificationError as e: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 8b0485826..1969462c3 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -24,7 +24,9 @@ description: - Create, delete and set attributes of data sets. - When forcing data set replacement, contents will not be preserved. -author: "Blake Becker (@blakeinate)" +author: + - "Blake Becker (@blakeinate)" + - "Rich Parker (@richp405)" options: name: description: @@ -109,7 +111,7 @@ description: - The data set type to be used when creating a data set. (e.g C(pdse)) - C(MEMBER) expects to be used with an existing partitioned data set. - - Choices are case-insensitive. + - Choices are case-sensitive. required: false type: str choices: @@ -157,7 +159,7 @@ record_format: description: - The format of the data set. (e.g C(FB)) - - Choices are case-insensitive. + - Choices are case-sensitive. - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS) then I(record_format=None), these types do not have a default I(record_format). 
@@ -171,6 +173,8 @@ - F type: str default: FB + aliases: + - format sms_storage_class: description: - The storage class for an SMS-managed dataset. @@ -179,6 +183,8 @@ - Note that all non-linear VSAM datasets are SMS-managed. type: str required: false + aliases: + - data_class sms_data_class: description: - The data class for an SMS-managed dataset. @@ -370,7 +376,7 @@ description: - The data set type to be used when creating a data set. (e.g C(PDSE)) - C(MEMBER) expects to be used with an existing partitioned data set. - - Choices are case-insensitive. + - Choices are case-sensitive. required: false type: str choices: @@ -418,7 +424,7 @@ record_format: description: - The format of the data set. (e.g C(FB)) - - Choices are case-insensitive. + - Choices are case-sensitive. - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS) then I(record_format=None), these types do not have a default I(record_format). @@ -432,6 +438,8 @@ - F type: str default: FB + aliases: + - format sms_storage_class: description: - The storage class for an SMS-managed dataset. @@ -440,6 +448,8 @@ - Note that all non-linear VSAM datasets are SMS-managed. type: str required: false + aliases: + - data_class sms_data_class: description: - The data class for an SMS-managed dataset. @@ -539,7 +549,7 @@ - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: seq + type: SEQ state: present - name: Create a PDS data set if it does not exist @@ -548,26 +558,26 @@ type: pds space_primary: 5 space_type: M - record_format: fba + record_format: FBA record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. 
zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 volumes: "222222" replace: yes @@ -575,19 +585,19 @@ - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: esds + type: ESDS - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: ksds + type: KSDS key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: rrds + type: RRDS sms_storage_class: mydata - name: Delete a data set if it exists @@ -632,7 +642,7 @@ type: PDS space_primary: 5 space_type: M - record_format: fb + record_format: FB replace: yes - name: someds.name.here1(member1) type: MEMBER @@ -799,7 +809,7 @@ def space_type(contents, dependencies): """Validates provided data set unit of space is valid. Returns the unit of space.""" if dependencies.get("state") == "absent": - return None + return "M" if contents is None: return None match = re.fullmatch(r"(M|G|K|TRK|CYL)", contents, re.IGNORECASE) @@ -865,9 +875,9 @@ def record_format(contents, dependencies): """Validates data set format is valid. Returns uppercase data set format.""" if dependencies.get("state") == "absent": - return None + return "FB" if contents is None: - return None + return "FB" formats = "|".join(DATA_SET_FORMATS) if not re.fullmatch(formats, contents, re.IGNORECASE): raise ValueError( @@ -986,33 +996,7 @@ def perform_data_set_operations(name, state, **extra_args): return changed -def fix_old_size_arg(params): - """ for backwards compatibility with old styled size argument """ - match = None - if params.get("size"): - match = re.fullmatch( - r"([1-9][0-9]*)(M|G|K|TRK|CYL)", str(params.get("size")), re.IGNORECASE - ) - if not match: - raise ValueError( - 'Value {0} is invalid for size argument. 
Valid size measurements are "K", "M", "G", "TRK" or "CYL".'.format( - str(params.get("size")) - ) - ) - if params.get("space_primary"): - match = re.fullmatch( - r"([1-9][0-9]*)(M|G|K|TRK|CYL)", - str(params.get("space_primary")), - re.IGNORECASE, - ) - if match: - params["space_primary"] = int(match.group(1)) - params["space_type"] = match.group(2) - return params - - def parse_and_validate_args(params): - params = fix_old_size_arg(params) arg_defs = dict( # Used for batch data set args @@ -1030,9 +1014,18 @@ def parse_and_validate_args(params): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type=data_set_type, required=False, dependencies=["state"]), + type=dict( + type=data_set_type, + required=False, + dependencies=["state"], + choices=DATA_SET_TYPES, + ), space_type=dict( - type=space_type, required=False, dependencies=["state"] + type=space_type, + required=False, + dependencies=["state"], + choices=["K", "M", "G", "CYL", "TRK"], + default="M", ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict( @@ -1042,7 +1035,9 @@ def parse_and_validate_args(params): type=record_format, required=False, dependencies=["state"], + choices=["FB", "VB", "FBA", "VBA", "U", "F"], aliases=["format"], + default="FB", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1114,14 +1109,22 @@ def parse_and_validate_args(params): choices=["present", "absent", "cataloged", "uncataloged"], ), type=dict(type=data_set_type, required=False, dependencies=["state"]), - space_type=dict(type=space_type, required=False, dependencies=["state"]), + space_type=dict( + type=space_type, + required=False, + dependencies=["state"], + choices=["K", "M", "G", "CYL", "TRK"], + default="M", + ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict(type="int", required=False, dependencies=["state"]), record_format=dict( type=record_format, 
required=False, dependencies=["state"], + choices=["FB", "VB", "FBA", "VBA", "U", "F"], aliases=["format"], + default="FB", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1179,7 +1182,7 @@ def parse_and_validate_args(params): # ["batch", "space_type"], # ["batch", "space_primary"], # ["batch", "space_secondary"], - ["batch", "record_format"], + # ["batch", "record_format"], ["batch", "sms_management_class"], ["batch", "sms_storage_class"], ["batch", "sms_data_class"], @@ -1218,11 +1221,27 @@ def run_module(): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type="str", required=False, default="PDS"), - space_type=dict(type="str", required=False, default="M"), - space_primary=dict(type="int", required=False, aliases=["size"], default=5), + type=dict( + type="str", + required=False, + default="PDS", + choices=DATA_SET_TYPES, + ), + space_type=dict( + type="str", + required=False, + default="M", + choices=["K", "M", "G", "CYL", "TRK"], + ), + space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), - record_format=dict(type="str", required=False, aliases=["format"], default="FB"), + record_format=dict( + type="str", + required=False, + aliases=["format"], + default="FB", + choices=["FB", "VB", "FBA", "VBA", "U", "F"], + ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported # SMS data class when they were actually passing as storage class @@ -1267,11 +1286,27 @@ def run_module(): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type="str", required=False, default="PDS"), - space_type=dict(type="str", required=False, default="M"), - space_primary=dict(type="raw", required=False, aliases=["size"], default=5), + type=dict( + type="str", + required=False, + default="PDS", + choices=DATA_SET_TYPES, + ), + 
space_type=dict( + type="str", + required=False, + default="M", + choices=["K", "M", "G", "CYL", "TRK"], + ), + space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), - record_format=dict(type="str", required=False, aliases=["format"], default="FB"), + record_format=dict( + type="str", + required=False, + aliases=["format"], + choices=["FB", "VB", "FBA", "VBA", "U", "F"], + default="FB" + ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported # SMS data class when they were actually passing as storage class @@ -1319,6 +1354,7 @@ def run_module(): # This evaluation will always occur as a result of the limitation on the # better arg parser, this will serve as a solution for now and ensure # the non-batch and batch arguments are correctly set + # This section is copied down inside if/check_mode false, so it modifies after the arg parser if module.params.get("batch") is not None: for entry in module.params.get("batch"): if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: @@ -1340,7 +1376,9 @@ def run_module(): elif module.params.get("type") is not None: if module.params.get("type").upper() in DATA_SET_TYPES_VSAM: # For VSAM types set the value to nothing and let the code manage it - module.params["record_format"] = None + # module.params["record_format"] = None + if module.params.get("record_format") is not None: + del module.params["record_format"] if not module.check_mode: try: @@ -1353,6 +1391,30 @@ def run_module(): result["names"] = [d.get("name", "") for d in data_set_param_list] for data_set_params in data_set_param_list: + # This *appears* redundant, bit the parse_and_validate reinforces the default value for record_type + if data_set_params.get("batch") is not None: + for entry in data_set_params.get("batch"): + if entry.get('type') is not None and entry.get("type").upper() in 
DATA_SET_TYPES_VSAM: + entry["record_format"] = None + if data_set_params.get("type") is not None: + data_set_params["type"] = None + if data_set_params.get("state") is not None: + data_set_params["state"] = None + if data_set_params.get("space_type") is not None: + data_set_params["space_type"] = None + if data_set_params.get("space_primary") is not None: + data_set_params["space_primary"] = None + if data_set_params.get("space_secondary") is not None: + data_set_params["space_secondary"] = None + if data_set_params.get("replace") is not None: + data_set_params["replace"] = None + if data_set_params.get("record_format") is not None: + data_set_params["record_format"] = None + else: + if data_set_params.get("type").upper() in DATA_SET_TYPES_VSAM: + if data_set_params.get("record_format") is not None: + data_set_params["record_format"] = None + # remove unnecessary empty batch argument result["changed"] = perform_data_set_operations( **data_set_params diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 28882d9ce..0167c1b83 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -27,13 +27,14 @@ # TODO: determine if data set names need to be more generic for testcases # TODO: add additional tests to check additional data set creation parameter combinations + data_set_types = [ - ("pds"), - ("seq"), - ("pdse"), - ("esds"), - ("rrds"), - ("lds"), + ("PDS"), + ("SEQ"), + ("PDSE"), + ("ESDS"), + ("RRDS"), + ("LDS"), ] TEMP_PATH = "/tmp/jcl" @@ -152,9 +153,9 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume_1 - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume_1 + # ) hosts.all.zos_data_set(name=dataset, state="absent") 
hosts.all.file(path=TEMP_PATH, state="directory") @@ -163,24 +164,28 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst src=TEMP_PATH + "/SAMPLE", location="USS", wait_time_s=30 ) # verify data set creation was successful + for result in results.contacted.values(): if(result.get("jobs")[0].get("ret_code") is None): submitted_job_id = result.get("jobs")[0].get("job_id") assert submitted_job_id is not None results = hosts.all.zos_job_output(job_id=submitted_job_id) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + # verify first uncatalog was performed results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # verify second uncatalog shows uncatalog already performed results = hosts.all.zos_data_set(name=dataset, state="uncataloged") + for result in results.contacted.values(): assert result.get("changed") is False # recatalog the data set results = hosts.all.zos_data_set( name=dataset, state="cataloged", volumes=volume_1 ) + for result in results.contacted.values(): assert result.get("changed") is True # verify second catalog shows catalog already performed @@ -207,9 +212,9 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume_1 - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume_1 + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -234,6 +239,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s results = hosts.all.zos_data_set( name=dataset, state="present", volumes=volume_1 ) + for result in results.contacted.values(): assert result.get("changed") is True finally: @@ -252,9 +258,9 @@ def 
test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ volume = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -300,9 +306,9 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy volume_1 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume_1 - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume_1 + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -339,13 +345,13 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans volume_2 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) + # hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") # verify data set creation was successful for result in results.contacted.values(): @@ -368,11 +374,14 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="absent") - # ensure data set absent - results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) + # ensure second data set absent + results = 
hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_2) for result in results.contacted.values(): assert result.get("changed") is True + # ensure first data set absent + hosts.all.zos_data_set(name=dataset, state="cataloged") + results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) for result in results.contacted.values(): assert result.get("changed") is True @@ -401,7 +410,7 @@ def test_data_set_creation_when_present_replace(ansible_zos_module, dstype): try: hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set( + results = hosts.all.zos_data_set( name=dataset, state="present", type=dstype, replace=True ) results = hosts.all.zos_data_set( @@ -460,7 +469,7 @@ def test_batch_data_set_creation_and_deletion(ansible_zos_module): results = hosts.all.zos_data_set( batch=[ {"name": dataset, "state": "absent"}, - {"name": dataset, "type": "pds", "state": "present"}, + {"name": dataset, "type": "PDS", "state": "present"}, {"name": dataset, "state": "absent"}, ] ) @@ -477,11 +486,11 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": dataset, "type": "pds", "directory_blocks": 5}, - {"name": dataset + "(newmem1)", "type": "member"}, + {"name": dataset, "type": "PDS", "directory_blocks": 5}, + {"name": dataset + "(newmem1)", "type": "MEMBER"}, { "name": dataset + "(newmem2)", - "type": "member", + "type": "MEMBER", "state": "present", }, {"name": dataset, "state": "absent"}, @@ -525,7 +534,7 @@ def test_data_member_force_delete(ansible_zos_module): DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) # set up: # create pdse - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="PDSE", replace=True) for result in results.contacted.values(): assert result.get("changed") is True @@ 
-534,25 +543,25 @@ def test_data_member_force_delete(ansible_zos_module): batch=[ { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, @@ -769,27 +778,6 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_data_set_old_aliases(ansible_zos_module, volumes_on_systems): - volumes = Volume_Handler(volumes_on_systems) - volume_1 = volumes.get_available_vol() - try: - hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, - state="present", - format="fb", - size="5m", - volume=volume_1, - ) - for result in results.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None - finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - - def test_data_set_temp_data_set_name(ansible_zos_module): try: hosts = ansible_zos_module @@ -966,7 +954,8 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): name=DEFAULT_DATA_SET_NAME, state="present", format=formats, - size="5m", + space_primary="5", + space_type="M", volume=volume_1, ) for result in results.contacted.values(): diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 8778d80f9..42b415ae6 100644 --- a/tests/sanity/ignore-2.10.txt +++ 
b/tests/sanity/ignore-2.10.txt @@ -27,8 +27,6 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 9ceaf3c97..bf118f7b9 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -27,10 +27,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. 
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index 9ceaf3c97..bf118f7b9 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -27,10 +27,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index 70d4764e1..8176aa2bb 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -7,10 +7,7 @@ plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 55477a2d0..0167d6c81 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -4,10 +4,7 @@ plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 55477a2d0..0167d6c81 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -4,10 +4,7 @@ plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 55477a2d0..0167d6c81 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -4,10 +4,7 @@ plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index 992ec6099..62d724706 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -26,10 +26,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. 
plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From b4156f8cc12bec1e3bca38c1c2eebc9e81ca78b5 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 13 Mar 2024 13:25:51 -0700 Subject: [PATCH 325/413] [v1.10.0] [Bugfix] [zos_job_submit] Fix non-printable chars handling and testing in jobs (#1300) * Update non-printable chars test * Add support for handling JSON decode errors Add support for when ZOAU v1.3.0 and later can't read and create JSON output from a job's output. --- plugins/module_utils/job.py | 6 +- .../modules/test_zos_job_submit_func.py | 55 ++++++++++++------- 2 files changed, 39 insertions(+), 22 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1afdaed55..1f49a2b26 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -18,6 +18,10 @@ import traceback from time import sleep from timeit import default_timer as timer +# Only importing this module so we can catch a JSONDecodeError that sometimes happens +# when a job's output has non-printable chars that conflict with JSON's control +# chars. +from json import decoder from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) @@ -366,7 +370,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T single_dd["step_name"], single_dd["dd_name"] ) - except UnicodeDecodeError: + except (UnicodeDecodeError, decoder.JSONDecodeError): tmpcont = ( "Non-printable UTF-8 characters were present in this output. " "Please access it manually." 
diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 0694cdfa0..394a087ad 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -262,36 +262,49 @@ C_SRC_INVALID_UTF8 = """#include <stdio.h> int main() { - unsigned char a=0x64; - unsigned char b=0x2A; - unsigned char c=0xB8; - unsigned char d=0xFF; - unsigned char e=0x81; - unsigned char f=0x82; - unsigned char g=0x83; - unsigned char h=0x00; - printf("Value of a: Hex: %X, character: %c",a,a); - printf("Value of b: Hex: %X, character: %c",b,b); - printf("Value of c: Hex: %X, character: %c",c,c); - printf("Value of d: Hex: %X, character: %c",d,d); - printf("Value of a: Hex: %X, character: %c",e,e); - printf("Value of b: Hex: %X, character: %c",f,f); - printf("Value of c: Hex: %X, character: %c",g,g); - printf("Value of d: Hex: %X, character: %c",h,h); - return 0; + unsigned char a=0x64; + unsigned char b=0x2A; + unsigned char c=0xB8; + unsigned char d=0xFF; + unsigned char e=0x81; + unsigned char f=0x82; + unsigned char g=0x83; + unsigned char h=0x00; + /* The following are non-printables from DBB. 
*/ + unsigned char nl=0x15; + unsigned char cr=0x0D; + unsigned char lf=0x25; + unsigned char shiftOut=0x0E; + unsigned char shiftIn=0x0F; + + printf("Value of a: Hex: %X, character: %c",a,a); + printf("Value of b: Hex: %X, character: %c",b,b); + printf("Value of c: Hex: %X, character: %c",c,c); + printf("Value of d: Hex: %X, character: %c",d,d); + printf("Value of e: Hex: %X, character: %c",e,e); + printf("Value of f: Hex: %X, character: %c",f,f); + printf("Value of g: Hex: %X, character: %c",g,g); + printf("Value of h: Hex: %X, character: %c",h,h); + printf("Value of NL: Hex: %X, character: %c",nl,nl); + printf("Value of CR: Hex: %X, character: %c",cr,cr); + printf("Value of LF: Hex: %X, character: %c",lf,lf); + printf("Value of Shift-Out: Hex: %X, character: %c",shiftOut,shiftOut); + printf("Value of Shift-In: Hex: %X, character: %c",shiftIn,shiftIn); + + return 0; } """ JCL_INVALID_UTF8_CHARS_EXC = """//* //****************************************************************************** //* Job that runs a C program that returns characters outside of the UTF-8 range -//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 onwards -//* that deals properly with these chars. +//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 and +//* later that deals properly with these chars. //* The JCL needs to be formatted to give it the directory where the C program //* is located. 
//****************************************************************************** //NOEBCDIC JOB (T043JM,JM00,1,0,0,0),'NOEBCDIC - JRM', -// MSGCLASS=X,MSGLEVEL=1,NOTIFY=&SYSUID +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=&SYSUID //NOPRINT EXEC PGM=BPXBATCH //STDPARM DD * SH ( @@ -774,7 +787,7 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): hosts.all.shell( cmd="echo {0} > {1}/noprint.c".format(quote(C_SRC_INVALID_UTF8), TEMP_PATH) ) - hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c") + hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c".format(TEMP_PATH)) # Create local JCL and submit it. tmp_file = tempfile.NamedTemporaryFile(delete=True) From 5a4f768d7472dd1524f7c80b8654dbba542996bb Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 19 Mar 2024 12:43:00 -0400 Subject: [PATCH 326/413] un-commented the blind catalog action to test if other fixes corrected the catalog issue (#1303) --- .../modules/test_zos_data_set_func.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 0167c1b83..7ab4685c0 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -153,9 +153,9 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume_1 - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume_1 + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -212,9 +212,9 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - # hosts.all.zos_data_set( - # name=dataset, 
state="cataloged", volumes=volume_1 - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume_1 + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -258,9 +258,9 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ volume = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -306,9 +306,9 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy volume_1 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume_1 - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume_1 + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -345,7 +345,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans volume_2 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - # hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) + hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) hosts.all.zos_data_set(name=dataset, state="absent") From 4c2be29b07d59a7739fa6449fa7fdcaf4c682e33 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 19 Mar 2024 11:24:35 -0600 Subject: [PATCH 327/413] Enabler/692/add changelog lint (#1304) * Create bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Added changelog action * Update changelog.yml * Create close-stale-issues * Update 
close-stale-issues Quite el workflow dispatch * Create bandit2.yml * Update bandit2.yml * Update zos_copy.py * Update zos_copy.py Me equivoque * Create ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Added ac changelog * added lint as an option * Added documentation to ac_changelog * Changed 'lint' to 'command' on ac_changelog * Create * Create first version of the changelog action * Update changelog.yml * Fix changelog.yml * Change name of action Antsibull 'Changelog lint' to AC Changelog lint * Rename 'changelog.yml' to 'ac_changelog.yml * Create ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Change path in 'venv setup' on ac * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Removed not required github actions * Update zos_copy.py * Update ac_changelog.yml * Update ac_changelog.yml * Indented steps section * Modified changed line * Added changelog --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .github/workflows/ac_changelog.yml | 39 +++++++++++++++++++ ac | 37 ++++++++++++++++++ .../fragments/692-changelog-lint-ac-tool.yml | 8 ++++ 3 files changed, 84 insertions(+) create mode 100644 .github/workflows/ac_changelog.yml create mode 100644 changelogs/fragments/692-changelog-lint-ac-tool.yml diff --git a/.github/workflows/ac_changelog.yml b/.github/workflows/ac_changelog.yml new file mode 100644 index 000000000..523e207b9 --- /dev/null +++ b/.github/workflows/ac_changelog.yml @@ -0,0 +1,39 @@ +name: AC Changelog Lint + +on: + pull_request: + paths: + - 'changelogs/fragments/*' + branches: + - dev + - staging* + +jobs: + lint: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up python + uses: 
actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + pip install antsibull-changelog + + - name: Run ac-changelog + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-changelog --command lint diff --git a/ac b/ac index dad00194c..bb307f4a6 100755 --- a/ac +++ b/ac @@ -241,6 +241,32 @@ ac_build(){ $VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-* } +# ------------------------------------------------------------------------------ +# Run a changelog lint locally +# ------------------------------------------------------------------------------ +#->ac-changelog: +## Runs antsibull-changelog to generate the release changelog or perform a lint +## on changelog fragments or release notes. +## Usage: ac [--ac-changelog <command>] +## <command> - choose from 'init', 'lint', 'lint-changelog-yaml', 'release', 'generate' +## - generate generate the changelog +## - init set up changelog infrastructure for collection, or an other project +## - lint check changelog fragments for syntax errors +## - lint-changelog-yaml check syntax of changelogs/changelog.yaml file +## - release add a new release to the change metadata +## Example: +## $ ac --ac-changelog --command lint +## $ ac --ac-changelog --command release +## $ ac --ac-changelog +ac_changelog(){ + option_command=$1 + if [ ! "$option_command" ]; then + option_command="lint" + fi + message "Running Changelog '$option_command'" + . 
$VENV_BIN/activate && antsibull-changelog "${option_command}" +} + # ------------------------------------------------------------------------------ # Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) # ------------------------------------------------------------------------------ @@ -653,6 +679,10 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; + --ac-changelog) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-changelog" + ;; --ac-install) ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" @@ -716,6 +746,11 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--venv-stop" ;; + --command|--command=?*) # option + command=`option_processor $1 $2` + option_sanitize $command + shift + ;; --debug|--debug=?*) # option debug=`option_processor $1 $2` option_sanitize $debug @@ -800,6 +835,8 @@ if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then ac_bandit $level elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then + ac_changelog $command elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then diff --git a/changelogs/fragments/692-changelog-lint-ac-tool.yml b/changelogs/fragments/692-changelog-lint-ac-tool.yml new file mode 100644 index 000000000..cbf6bab7d --- /dev/null +++ b/changelogs/fragments/692-changelog-lint-ac-tool.yml @@ -0,0 +1,8 @@ +trivial: + - ac - Added new command ac-changelog into ac tool to run changelog + fragments lint and changelog release generation. + (https://github.com/ansible-collections/ibm_zos_core/pull/1304). + + - workflows/ac_changelog - Added new github action that will lint + changelog fragments upon a new pull request. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1304). \ No newline at end of file From 9c5bab3c39214ff2e4c0ab07f28a1624a0d336ae Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 20 Mar 2024 13:37:33 -0600 Subject: [PATCH 328/413] Changed case sensitive options --- .../functional/modules/test_zos_copy_func.py | 194 +++++++++--------- 1 file changed, 97 insertions(+), 97 deletions(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cf7f1494b..bbd598f1c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1684,7 +1684,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="PDSE", replace=True ) @@ -1733,7 +1733,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="PDSE", replace=True ) @@ -1784,7 +1784,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1834,7 +1834,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1884,7 +1884,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="PDSE", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1977,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", 
type="member", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2266,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2294,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2432,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2458,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, 
dest=dest, remote_src=True, force=force) @@ -2489,7 +2489,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2520,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2565,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) @@ -2611,14 +2611,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="member", state="present") + hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2647,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + 
dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "PDSE": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2694,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if 
args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "PDSE": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - hosts.all.zos_data_set(name=dest, type="member") + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest, type="MEMBER") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2838,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2854,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, ) @@ -2877,18 +2877,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "PDSE" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = 
"{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "seq": - hosts.all.zos_data_set(name=src, type="member") + if src_type != "PDSE": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2918,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="pds", dest_type="pds"), - dict(src_type="pds", dest_type="pdse"), - dict(src_type="pdse", dest_type="pds"), - dict(src_type="pdse", dest_type="pdse"), + dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDSE"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2973,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2984,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3006,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3018,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3111,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( 
name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3122,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3132,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3143,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3261,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3272,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3300,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3312,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3453,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, 
record_format="FB", record_length=80, @@ -3464,7 +3464,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3524,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3615,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3626,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3651,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3663,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3827,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3878,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -3914,8 +3914,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) 
for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDSE") + hosts.all.zos_data_set(name=dest, type="PDSE") for member in ds_list: hosts.all.shell( @@ -3960,8 +3960,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDSE") + hosts.all.zos_data_set(name=dest, type="PDSE") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +3994,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) +@pytest.mark.parametrize("ds_type", ["PDSE", "PDSE"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4032,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True), + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4079,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4124,7 @@ def 
test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4170,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4206,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4218,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="PDSE", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4251,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) +@pytest.mark.parametrize("dest_type", ["PDSE", "PDSE"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = 
"/etc/profile" @@ -4262,7 +4262,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=25, ) @@ -4294,10 +4294,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", backup=None), - dict(type="pds", backup="USER.TEST.PDS.BACKUP"), - dict(type="pdse", backup=None), - dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDS.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4343,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4359,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "seq": - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "PDSE": + hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4631,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = 
hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) From 73eae1c3cd1786b242b359d5cf58d84d5b5eaf96 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 20 Mar 2024 19:57:12 -0600 Subject: [PATCH 329/413] Revert "Changed case sensitive options" This reverts commit 9c5bab3c39214ff2e4c0ab07f28a1624a0d336ae. --- .../functional/modules/test_zos_copy_func.py | 194 +++++++++--------- 1 file changed, 97 insertions(+), 97 deletions(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index bbd598f1c..cf7f1494b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1684,7 +1684,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="seq", replace=True ) @@ -1733,7 +1733,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="seq", replace=True ) @@ -1784,7 +1784,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1834,7 +1834,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1884,7 +1884,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="seq", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1977,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type 
== "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2266,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2294,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2432,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2458,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") 
hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2489,7 +2489,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2520,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2565,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=80, replace=True ) @@ -2611,14 +2611,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") + hosts.all.zos_data_set(name=dest, type="member", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2647,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True), - dict(type="PDSE", is_binary=False), - 
dict(type="PDSE", is_binary=True), - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True) + dict(type="seq", is_binary=False), + dict(type="seq", is_binary=True), + dict(type="pds", is_binary=False), + dict(type="pds", is_binary=True), + dict(type="pdse", is_binary=False), + dict(type="pdse", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "PDSE": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2694,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True) + dict(type="seq", force=False), + dict(type="seq", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - 
src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "PDSE": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) - hosts.all.zos_data_set(name=dest, type="MEMBER") + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest, type="member") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2838,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2854,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=80, ) @@ -2877,18 +2877,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "PDSE" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "seq" 
else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "PDSE": - hosts.all.zos_data_set(name=src, type="MEMBER") + if src_type != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2918,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="PDSE", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="pds", dest_type="pds"), + dict(src_type="pds", dest_type="pdse"), + dict(src_type="pdse", dest_type="pds"), + dict(src_type="pdse", dest_type="pdse"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2973,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2984,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3006,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3018,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3111,7 @@ def 
test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3122,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3132,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3143,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3261,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3272,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3300,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3312,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3453,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): 
hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3464,7 +3464,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3524,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3615,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3626,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3651,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3663,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3827,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3878,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -3914,8 +3914,8 
@@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDSE") - hosts.all.zos_data_set(name=dest, type="PDSE") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for member in ds_list: hosts.all.shell( @@ -3960,8 +3960,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDSE") - hosts.all.zos_data_set(name=dest, type="PDSE") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +3994,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4032,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="PDSE", force=False), - dict(ds_type="PDSE", force=True), - dict(ds_type="PDSE", force=False), - dict(ds_type="PDSE", force=True) + dict(ds_type="pds", force=False), + dict(ds_type="pds", force=True), + dict(ds_type="pdse", force=False), + dict(ds_type="pdse", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4079,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def 
test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4124,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4170,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4206,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4218,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4251,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("dest_type", 
["pds", "pdse"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4262,7 +4262,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=25, ) @@ -4294,10 +4294,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", backup=None), - dict(type="PDSE", backup="USER.TEST.PDS.BACKUP"), - dict(type="PDSE", backup=None), - dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), + dict(type="pds", backup=None), + dict(type="pds", backup="USER.TEST.PDS.BACKUP"), + dict(type="pdse", backup=None), + dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4343,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4359,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "PDSE": - hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') + if src_type != "seq": + hosts.all.zos_data_set(name=source_member, type="member", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4631,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", 
state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) From 6e08d0730800502958876813bdd7577199dc8d50 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 29 Mar 2024 09:06:26 -0600 Subject: [PATCH 330/413] Added Needs Triage on bug template (#1314) --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 2193cb615..9395c85b1 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,7 +1,7 @@ name: Report a bug description: Request that a bug be reviewed. Complete all required fields. title: "[Bug] Enter description" -labels: [Bug] +labels: ["Bug", "Needs Triage" ] assignees: - IBMAnsibleHelper body: From 3b4951042c5ae79587e66e52dae2fa27dc922e1a Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 29 Mar 2024 15:43:19 -0600 Subject: [PATCH 331/413] Add galaxy importer into ac as a command and create a GitHub action (#1305) * Create bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Added changelog action * Update changelog.yml * Create close-stale-issues * Update close-stale-issues Quite el workflow dispatch * Create bandit2.yml * Update bandit2.yml * Update zos_copy.py * Update zos_copy.py Me equivoque * Create ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Create ac_changelog.yml * Delete .github/workflows/ac_changelog.yml * Create ac_changelog.yml * Update ac_changelog.yml * Add galaxy importer to ac and create 
workflow with the ac command for it * Delete a jump of line * Create ac-galaxy-importer.yml * Rename action * Rename job * Update ac-galaxy-importer.yml * Fix * Fix * Rename ac-galaxy-importer to ac-galaxy-importer.yml * Acomodate function documentation in ac * Delete invasive files * Added line * Update ac * Update ac * Update ac --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .github/workflows/ac-galaxy-importer.yml | 40 ++++++++++++++++++++++++ ac | 18 +++++++++++ 2 files changed, 58 insertions(+) create mode 100644 .github/workflows/ac-galaxy-importer.yml diff --git a/.github/workflows/ac-galaxy-importer.yml b/.github/workflows/ac-galaxy-importer.yml new file mode 100644 index 000000000..271f01c22 --- /dev/null +++ b/.github/workflows/ac-galaxy-importer.yml @@ -0,0 +1,40 @@ +name: AC Galaxy Importer + +on: + pull_request: + branches: + - dev + - staging* + +jobs: + galaxy-importer: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install ansible + pip install ansible-importer + pip install galaxy-importer + + - name: Run ac-galaxy-importer + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-galaxy-importer diff --git a/ac b/ac index bb307f4a6..9aee6a02d 100755 --- a/ac +++ b/ac @@ -242,6 +242,18 @@ ac_build(){ } # ------------------------------------------------------------------------------ +# Run galaxy importer on collection. +# ------------------------------------------------------------------------------ +#->ac-galaxy-importer: +## Build current branch and run galaxy importer on collection. 
+## Usage: ac [--ac-galaxy-importer] +## Example: +## $ ac --ac-galaxy-importer +ac_galaxy_importer(){ + message "Running Galaxy Importer" + . $VENV_BIN/activate && collection_name=$($VENV_BIN/ansible-galaxy collection build --force | awk -F/ '{print $NF}') && python -m galaxy_importer.main $collection_name +} + # Run a changelog lint locally # ------------------------------------------------------------------------------ #->ac-changelog: @@ -679,6 +691,10 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; + --ac-galaxy-importer) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-galaxy-importer" + ;; --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" @@ -835,6 +851,8 @@ if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then ac_bandit $level elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ; then + ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then ac_changelog $command elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then From 5788acdc7a16895407189603aee0c7ab965352ec Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 29 Mar 2024 15:45:03 -0600 Subject: [PATCH 332/413] Create bandit github action using the ac command (#1310) --- .github/workflows/ac-bandit.yml | 38 +++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/ac-bandit.yml diff --git a/.github/workflows/ac-bandit.yml b/.github/workflows/ac-bandit.yml new file mode 100644 index 000000000..288fb92b1 --- /dev/null +++ b/.github/workflows/ac-bandit.yml @@ -0,0 +1,38 @@ +name: AC Bandit + +on: + pull_request: + branches: + - dev + - staging* + +jobs: + bandit: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: 
actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install bandit + + - name: Run ac-bandit + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-bandit --level l From f7e9c1bc3f27291009c5387e597e39c71405b7eb Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 29 Mar 2024 15:57:54 -0600 Subject: [PATCH 333/413] [Enabler] [zos_copy] Fix sanity issues and remove ignore files (#1307) * Removed localchartset * Fixed sanity * Updated to encoding * Updated encoding parser * Fixed encoding to null when remote_src=true * Updated condition to set encoding to none * removed size parameter * Changed src * Added full local src * Corrected base name for temp_path * Fixed pep8 issue * Changed temp_path to src logic * Added src to temp_path * Added module fail * Replaced temp name generation * Placed temporary file into tmp folder * Removing temp_path * Added latest temp path changes * Fixed lock check issue * Removed temp_path * Removed temp path * Removed is_something vasrs * Fixed comment * Added latest zos_copy_changes * Removed print statements * Removed ingore entry * removed entries * Corrected case sensitivity in tests * Fixed lowercase * Modified docs * Added changelog --- .../fragments/1307-update-sanity-zos_copy.yml | 10 + plugins/action/zos_copy.py | 61 +++-- plugins/modules/zos_copy.py | 224 ++++++++---------- .../functional/modules/test_zos_copy_func.py | 200 ++++++++-------- tests/sanity/ignore-2.14.txt | 2 - tests/sanity/ignore-2.15.txt | 2 - tests/sanity/ignore-2.16.txt | 2 - 7 files changed, 241 insertions(+), 260 deletions(-) create mode 100644 changelogs/fragments/1307-update-sanity-zos_copy.yml diff --git 
a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml new file mode 100644 index 000000000..858f0b64c --- /dev/null +++ b/changelogs/fragments/1307-update-sanity-zos_copy.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_copy - Documented `group` and `owner` options. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + +trivial: + - zos_copy - Removed many of the variables that were passed from the + action plugin to the module, reimplementing the logic inside the + module instead. Removed the use of temp_path variable inside zos_copy + in favor of using remote_src to deal with files copied to remote. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 592126b00..e9c238b87 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -29,11 +29,10 @@ from ansible import cli from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - is_member, - is_data_set + is_member ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template @@ -69,8 +68,8 @@ def run(self, tmp=None, task_vars=None): owner = task_args.get("owner", None) group = task_args.get("group", None) - is_pds = is_src_dir = False - temp_path = is_uss = is_mvs_dest = src_member = None + is_src_dir = False + temp_path = is_uss = None if dest: if not isinstance(dest, string_types): @@ -78,7 +77,6 @@ def run(self, tmp=None, task_vars=None): return self._exit_action(result, msg, failed=True) else: is_uss = "/" in dest - is_mvs_dest = is_data_set(dest) else: msg = "Destination is required" return self._exit_action(result, msg, failed=True) @@ -96,13 +94,11 @@ def run(self, tmp=None, task_vars=None): msg = 
"'src' or 'dest' must not be empty" return self._exit_action(result, msg, failed=True) else: - src_member = is_member(src) if not remote_src: if src.startswith('~'): src = os.path.expanduser(src) src = os.path.realpath(src) is_src_dir = os.path.isdir(src) - is_pds = is_src_dir and is_mvs_dest if not src and not content: msg = "'src' or 'content' is required" @@ -196,11 +192,6 @@ def run(self, tmp=None, task_vars=None): src = rendered_dir - task_args["size"] = sum( - os.stat(os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(f))).st_size - for path, dirs, files in os.walk(src) - for f in files - ) else: if mode == "preserve": task_args["mode"] = "0{0:o}".format( @@ -231,7 +222,6 @@ def run(self, tmp=None, task_vars=None): src = rendered_file - task_args["size"] = os.stat(src).st_size display.vvv(u"ibm_zos_copy calculated size: {0}".format(os.stat(src).st_size), host=self._play_context.remote_addr) transfer_res = self._copy_to_remote( src, is_dir=is_src_dir, ignore_stderr=ignore_sftp_stderr @@ -242,15 +232,31 @@ def run(self, tmp=None, task_vars=None): return transfer_res display.vvv(u"ibm_zos_copy temp path: {0}".format(transfer_res.get("temp_path")), host=self._play_context.remote_addr) + if not encoding: + encoding = { + "from": encode.Defaults.get_default_system_charset(), + } + + """ + We format temp_path correctly to pass it as src option to the module, + we keep the original source to return to the user and avoid confusion + by returning the temp_path created. 
+ """ + original_src = task_args.get("src") + if original_src: + if not remote_src: + base_name = os.path.basename(original_src) + if original_src.endswith("/"): + src = temp_path + "/" + else: + src = temp_path + else: + src = temp_path + task_args.update( dict( - is_uss=is_uss, - is_pds=is_pds, - is_src_dir=is_src_dir, - src_member=src_member, - temp_path=temp_path, - is_mvs_dest=is_mvs_dest, - local_charset=encode.Defaults.get_default_system_charset() + src=src, + encoding=encoding, ) ) copy_res = self._execute_module( @@ -284,17 +290,20 @@ def run(self, tmp=None, task_vars=None): self._remote_cleanup(dest, copy_res.get("dest_exists"), task_vars) return result - return _update_result(is_binary, copy_res, self._task.args) + return _update_result(is_binary, copy_res, self._task.args, original_src) def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): """Copy a file or directory to the remote z/OS system """ - temp_path = "/{0}/{1}".format(gettempprefix(), _create_temp_path_name()) + temp_path = "/{0}/{1}/{2}".format(gettempprefix(), _create_temp_path_name(), os.path.basename(src)) + self._connection.exec_command("mkdir -p {0}".format(os.path.dirname(temp_path))) _src = src.replace("#", "\\#") _sftp_action = 'put' + full_temp_path = temp_path if is_dir: src = src.rstrip("/") if src.endswith("/") else src + temp_path = os.path.dirname(temp_path) base = os.path.basename(src) self._connection.exec_command("mkdir -p {0}/{1}".format(temp_path, base)) _sftp_action += ' -r' # add '-r` to clone the source trees @@ -379,7 +388,7 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): display.vvv(u"ibm_zos_copy SSH transfer method restored to {0}".format(user_ssh_transfer_method), host=self._play_context.remote_addr) is_ssh_transfer_method_updated = False - return dict(temp_path=temp_path) + return dict(temp_path=full_temp_path) def _remote_cleanup(self, dest, dest_exists, task_vars): """Remove all files or data sets pointed to by 'dest' on the 
remote @@ -417,7 +426,7 @@ def _exit_action(self, result, msg, failed=False): return result -def _update_result(is_binary, copy_res, original_args): +def _update_result(is_binary, copy_res, original_args, original_src): """ Helper function to update output result with the provided values """ ds_type = copy_res.get("ds_type") src = copy_res.get("src") @@ -431,7 +440,7 @@ def _update_result(is_binary, copy_res, original_args): invocation=dict(module_args=original_args), ) if src: - updated_result["src"] = src + updated_result["src"] = original_src if note: updated_result["note"] = note if backup_name: diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index a854d1cae..6991c4d81 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -143,7 +143,7 @@ to: description: - The encoding to be converted to - required: true + required: false type: str tmp_hlq: description: @@ -243,6 +243,15 @@ type: bool default: true required: false + group: + description: + - Name of the group that will own the file system objects. + - When left unspecified, it uses the current group of the current user + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false mode: description: - The permission of the destination file or directory. @@ -261,6 +270,15 @@ the source file. type: str required: false + owner: + description: + - Name of the user that should own the filesystem object, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false remote_src: description: - If set to C(false), the module searches for C(src) at the local machine. 
@@ -803,37 +821,35 @@ """ -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - ZOAUImportError, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( - idcams -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, data_set, encode, backup, copy, validation, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( - AnsibleModuleHelper, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - is_member -) -from ansible.module_utils._text import to_bytes, to_native -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 -from re import IGNORECASE -from hashlib import sha256 import glob +import math +import os import shutil import stat -import math import tempfile -import os import traceback +from hashlib import sha256 +from re import IGNORECASE + +from ansible.module_utils._text import to_bytes, to_native +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six import PY3 +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + backup, better_arg_parser, copy, data_set, encode, validation) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import \ + AnsibleModuleHelper +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + is_member, + is_data_set +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ + ZOAUImportError +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import \ + idcams if PY3: - from re import fullmatch import pathlib + from re import fullmatch else: from re import match as fullmatch @@ -892,7 +908,6 @@ def run_command(self, cmd, **kwargs): def copy_to_seq( self, src, - temp_path, conv_path, dest, src_type @@ -904,13 +919,11 @@ def copy_to_seq( Arguments: src {str} -- Path to USS 
file or data set name - temp_path {str} -- Path to the location where the control node - transferred data to conv_path {str} -- Path to the converted source file dest {str} -- Name of destination data set src_type {str} -- Type of the source """ - new_src = conv_path or temp_path or src + new_src = conv_path or src copy_args = dict() copy_args["options"] = "" @@ -1031,15 +1044,15 @@ def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): entries = list(itr) return self._copy_tree(entries, src_dir, dest_dir, dirs_exist_ok=dirs_exist_ok) - def convert_encoding(self, src, temp_path, encoding): + def convert_encoding(self, src, encoding, remote_src): """Convert encoding for given src Arguments: src {str} -- Path to the USS source file or directory - temp_path {str} -- Path to the location where the control node - transferred data to encoding {dict} -- Charsets that the source is to be converted from and to + remote_src {bool} -- Whether the file was already on the remote + node or not. 
Raises: CopyOperationError -- When the encoding of a USS file is not @@ -1051,19 +1064,10 @@ def convert_encoding(self, src, temp_path, encoding): from_code_set = encoding.get("from") to_code_set = encoding.get("to") enc_utils = encode.EncodeUtils() - new_src = temp_path or src - + new_src = src if os.path.isdir(new_src): - if temp_path: - if src.endswith("/"): - new_src = "{0}/{1}".format( - temp_path, os.path.basename(os.path.dirname(src)) - ) - else: - new_src = "{0}/{1}".format(temp_path, - os.path.basename(src)) try: - if not temp_path: + if remote_src: temp_dir = tempfile.mkdtemp() shutil.copytree(new_src, temp_dir, dirs_exist_ok=True) new_src = temp_dir @@ -1081,7 +1085,7 @@ def convert_encoding(self, src, temp_path, encoding): raise CopyOperationError(msg=str(err)) else: try: - if not temp_path: + if remote_src: fd, temp_src = tempfile.mkstemp() os.close(fd) shutil.copy(new_src, temp_src) @@ -1270,24 +1274,23 @@ def copy_to_uss( src, dest, conv_path, - temp_path, src_ds_type, src_member, member_name, - force + force, + content_copy, ): """Copy a file or data set to a USS location Arguments: src {str} -- The USS source dest {str} -- Destination file or directory on USS - temp_path {str} -- Path to the location where the control node - transferred data to conv_path {str} -- Path to the converted source file or directory src_ds_type {str} -- Type of source src_member {bool} -- Whether src is a data set member member_name {str} -- The name of the source data set member force {bool} -- Whether to copy files to an already existing directory + content_copy {bool} -- Whether copy is using content option or not. 
Returns: {str} -- Destination where the file was copied to @@ -1322,11 +1325,11 @@ def copy_to_uss( if "File exists" not in err: raise CopyOperationError(msg=to_native(err)) - if os.path.isfile(temp_path or conv_path or src): - dest = self._copy_to_file(src, dest, conv_path, temp_path) + if os.path.isfile(conv_path or src): + dest = self._copy_to_file(src, dest, content_copy, conv_path) changed_files = None else: - dest, changed_files = self._copy_to_dir(src, dest, conv_path, temp_path, force) + dest, changed_files = self._copy_to_dir(src, dest, conv_path, force) if self.common_file_args is not None: mode = self.common_file_args.get("mode") @@ -1347,14 +1350,13 @@ def copy_to_uss( self.module.set_owner_if_different(dest, owner, False) return dest - def _copy_to_file(self, src, dest, conv_path, temp_path): + def _copy_to_file(self, src, dest, content_copy, conv_path): """Helper function to copy a USS src to USS dest. Arguments: src {str} -- USS source file path dest {str} -- USS dest file path - temp_path {str} -- Path to the location where the control node - transferred data to + content_copy {bool} -- Whether copy is using content option or not. conv_path {str} -- Path to the converted source file or directory Raises: @@ -1363,11 +1365,10 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): Returns: {str} -- Destination where the file was copied to """ - src_path = os.path.basename(src) if src else "inline_copy" + src_path = os.path.basename(src) if not content_copy else "inline_copy" if os.path.isdir(dest): dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) - - new_src = temp_path or conv_path or src + new_src = conv_path or src try: if self.is_binary: copy.copy_uss2uss_binary(new_src, dest) @@ -1402,7 +1403,6 @@ def _copy_to_dir( src_dir, dest_dir, conv_path, - temp_path, force ): """Helper function to copy a USS directory to another USS directory. 
@@ -1413,8 +1413,6 @@ def _copy_to_dir( src_dir {str} -- USS source directory dest_dir {str} -- USS dest directory conv_path {str} -- Path to the converted source directory - temp_path {str} -- Path to the location where the control node - transferred data to force {bool} -- Whether to copy files to an already existing directory Raises: @@ -1426,14 +1424,7 @@ def _copy_to_dir( that got copied. """ copy_directory = True if not src_dir.endswith("/") else False - - if temp_path: - temp_path = "{0}/{1}".format( - temp_path, - os.path.basename(os.path.normpath(src_dir)) - ) - - new_src_dir = temp_path or conv_path or src_dir + new_src_dir = conv_path or src_dir new_src_dir = os.path.normpath(new_src_dir) dest = dest_dir changed_files, original_permissions = self._get_changed_files(new_src_dir, dest_dir, copy_directory) @@ -1661,7 +1652,6 @@ def __init__( def copy_to_pdse( self, src, - temp_path, conv_path, dest, src_ds_type, @@ -1676,8 +1666,6 @@ def copy_to_pdse( Arguments: src {str} -- Path to USS file/directory or data set name. - temp_path {str} -- Path to the location where the control node - transferred data to. conv_path {str} -- Path to the converted source file/directory. dest {str} -- Name of destination data set. src_ds_type {str} -- The type of source. @@ -1685,7 +1673,7 @@ def copy_to_pdse( dest_member {str, optional} -- Name of destination member in data set. encoding {dict, optional} -- Dictionary with encoding options. 
""" - new_src = conv_path or temp_path or src + new_src = conv_path or src src_members = [] dest_members = [] @@ -2660,15 +2648,10 @@ def run_module(module, arg_def): owner = module.params.get('owner') encoding = module.params.get('encoding') volume = module.params.get('volume') - is_uss = module.params.get('is_uss') - is_pds = module.params.get('is_pds') - is_src_dir = module.params.get('is_src_dir') - is_mvs_dest = module.params.get('is_mvs_dest') - temp_path = module.params.get('temp_path') - src_member = module.params.get('src_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') force_lock = module.params.get('force_lock') + content = module.params.get('content') dest_data_set = module.params.get('dest_data_set') if dest_data_set: @@ -2676,6 +2659,13 @@ def run_module(module, arg_def): dest_data_set["volumes"] = [volume] copy_member = is_member(dest) + # This section we initialize different variables + # that we used to pass from the action plugin. + is_src_dir = os.path.isdir(src) + is_uss = "/" in dest + is_mvs_dest = is_data_set(dest) + is_pds = is_src_dir and is_mvs_dest + src_member = is_member(src) # ******************************************************************** # When copying to and from a data set member, 'dest' or 'src' will be @@ -2722,18 +2712,17 @@ def run_module(module, arg_def): # data sets with record format 'FBA' or 'VBA'. src_has_asa_chars = dest_has_asa_chars = False try: - # If temp_path, the plugin has copied a file from the controller to USS. - if temp_path or "/" in src: + if "/" in src: src_ds_type = "USS" - if remote_src and os.path.isdir(src): + if os.path.isdir(src): is_src_dir = True # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. 
if not is_binary and not is_uss and not executable: - new_src = temp_path or src + new_src = src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). encode_utils = encode.EncodeUtils() @@ -2790,9 +2779,8 @@ def run_module(module, arg_def): if is_uss: dest_ds_type = "USS" if src_ds_type == "USS" and not is_src_dir and (dest.endswith("/") or os.path.isdir(dest)): - src_basename = os.path.basename(src) if src else "inline_copy" + src_basename = os.path.basename(src) if not content else "inline_copy" dest = os.path.normpath("{0}/{1}".format(dest, src_basename)) - if dest.startswith("//"): dest = dest.replace("//", "/") @@ -2841,12 +2829,7 @@ def run_module(module, arg_def): if copy_member: dest_member_exists = dest_exists and data_set.DataSet.data_set_member_exists(dest) elif src_ds_type == "USS": - if temp_path: - root_dir = "{0}/{1}".format(temp_path, os.path.basename(os.path.normpath(src))) - root_dir = os.path.normpath(root_dir) - else: - root_dir = src - + root_dir = src dest_member_exists = dest_exists and data_set.DataSet.files_in_data_set_members(root_dir, dest) elif src_ds_type in data_set.DataSet.MVS_PARTITIONED: dest_member_exists = dest_exists and data_set.DataSet.data_set_shared_members(src, dest) @@ -2987,17 +2970,13 @@ def run_module(module, arg_def): # original one. This change applies only to the # allocate_destination_data_set call. 
if converted_src: - if remote_src: - original_src = src - src = converted_src - else: - original_temp = temp_path - temp_path = converted_src + original_src = src + src = converted_src try: if not is_uss: res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set( - temp_path or src, + src, dest_name, src_ds_type, dest_ds_type, dest_exists, @@ -3010,20 +2989,14 @@ def run_module(module, arg_def): ) except Exception as err: if converted_src: - if remote_src: - src = original_src - else: - temp_path = original_temp + src = original_src module.fail_json( msg="Unable to allocate destination data set: {0}".format(str(err)), dest_exists=dest_exists ) if converted_src: - if remote_src: - src = original_src - else: - temp_path = original_temp + src = original_src # ******************************************************************** # Encoding conversion is only valid if the source is a local file, @@ -3044,7 +3017,7 @@ def run_module(module, arg_def): # if is_mvs_dest: # encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET - conv_path = copy_handler.convert_encoding(src, temp_path, encoding) + conv_path = copy_handler.convert_encoding(src, encoding, remote_src) # ------------------------------- o ----------------------------------- # Copy to USS file or directory @@ -3068,17 +3041,17 @@ def run_module(module, arg_def): src, dest, conv_path, - temp_path, src_ds_type, src_member, member_name, - force + force, + bool(content) ) res_args['size'] = os.stat(dest).st_size remote_checksum = dest_checksum = None try: - remote_checksum = get_file_checksum(temp_path or src) + remote_checksum = get_file_checksum(src) dest_checksum = get_file_checksum(dest) if validate: @@ -3100,12 +3073,11 @@ def run_module(module, arg_def): elif dest_ds_type in data_set.DataSet.MVS_SEQ: # TODO: check how ASA behaves with this if src_ds_type == "USS" and not is_binary: - new_src = conv_path or temp_path or src + new_src = conv_path or src conv_path = 
normalize_line_endings(new_src, encoding) copy_handler.copy_to_seq( src, - temp_path, conv_path, dest, src_ds_type @@ -3117,8 +3089,6 @@ def run_module(module, arg_def): # Copy to PDS/PDSE # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY": - if not remote_src and not copy_member and os.path.isdir(temp_path): - temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( module, @@ -3132,7 +3102,6 @@ def run_module(module, arg_def): pdse_copy_handler.copy_to_pdse( src, - temp_path, conv_path, dest_name, src_ds_type, @@ -3163,7 +3132,7 @@ def run_module(module, arg_def): ) ) - return res_args, temp_path, conv_path + return res_args, conv_path def main(): @@ -3185,7 +3154,7 @@ def main(): ), "to": dict( type='str', - required=True, + required=False, ) } ), @@ -3255,14 +3224,6 @@ def main(): auto_reload=dict(type='bool', default=False), ) ), - is_uss=dict(type='bool'), - is_pds=dict(type='bool'), - is_src_dir=dict(type='bool'), - is_mvs_dest=dict(type='bool'), - size=dict(type='int'), - temp_path=dict(type='str'), - src_member=dict(type='bool'), - local_charset=dict(type='str'), force=dict(type='bool', default=False), force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), @@ -3333,15 +3294,16 @@ def main(): ) if ( - not module.params.get("encoding") + not module.params.get("encoding").get("to") and not module.params.get("remote_src") and not module.params.get("is_binary") and not module.params.get("executable") ): - module.params["encoding"] = { - "from": module.params.get("local_charset"), - "to": encode.Defaults.get_default_system_charset(), - } + module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset() + elif ( + not module.params.get("encoding").get("to") + ): + module.params["encoding"] = None if 
module.params.get("encoding"): module.params.update( @@ -3357,15 +3319,15 @@ def main(): ) ) - res_args = temp_path = conv_path = None + res_args = conv_path = None try: - res_args, temp_path, conv_path = run_module(module, arg_def) + res_args, conv_path = run_module(module, arg_def) module.exit_json(**res_args) except CopyOperationError as err: cleanup([]) module.fail_json(**(err.json_args)) finally: - cleanup([temp_path, conv_path]) + cleanup([conv_path]) class EncodingConversionError(Exception): diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cf7f1494b..6e6a9a073 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -794,6 +794,12 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ @pytest.mark.uss @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): + """ + This test evaluates the behavior of testing copy of a directory when src ends + with '/' versus only the dir name. Expectation is that when only dir name is provided + that directory is also created on the remote, when directory name ends with '/' + this means we only copy that directory contents without creating it on the remote. 
+ """ hosts = ansible_zos_module dest_path = "/tmp/new_dir" @@ -1684,7 +1690,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", replace=True ) @@ -1733,7 +1739,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", replace=True ) @@ -1784,7 +1790,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1834,7 +1840,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1884,7 +1890,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1983,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2272,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - 
hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2300,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2438,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2464,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2489,7 +2495,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2526,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", 
state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2571,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) @@ -2611,14 +2617,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="member", state="present") + hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2653,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + dict(type="SEQ", is_binary=False), + dict(type="SEQ", is_binary=True), + dict(type="PDS", is_binary=False), + dict(type="PDS", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: 
hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2700,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="SEQ", force=False), + dict(type="SEQ", force=True), + dict(type="PDS", force=False), + dict(type="PDS", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - hosts.all.zos_data_set(name=dest, type="member") + 
hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest, type="MEMBER") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2844,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2860,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, ) @@ -2877,18 +2883,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "seq": - hosts.all.zos_data_set(name=src, type="member") + if src_type != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2924,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="pds", dest_type="pds"), - dict(src_type="pds", dest_type="pdse"), - dict(src_type="pdse", dest_type="pds"), - dict(src_type="pdse", 
dest_type="pdse"), + dict(src_type="PDS", dest_type="PDS"), + dict(src_type="PDS", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDS"), + dict(src_type="PDSE", dest_type="PDSE"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2979,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2990,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3012,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3024,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3117,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3128,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3138,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3149,7 @@ def 
test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3267,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3278,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3306,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3318,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3459,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3464,7 +3470,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3530,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3621,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): 
hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3632,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3657,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3669,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3833,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3884,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -3914,8 +3920,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDS") + hosts.all.zos_data_set(name=dest, type="PDS") for member in ds_list: hosts.all.shell( @@ -3960,8 +3966,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDS") + hosts.all.zos_data_set(name=dest, 
type="PDS") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +4000,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) +@pytest.mark.parametrize("ds_type", ["PDS", "PDSE"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4038,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="PDS", force=False), + dict(ds_type="PDS", force=True), + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4085,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4130,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4176,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def 
test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4212,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="PDS", force=False), + dict(type="PDS", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4224,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="SEQ", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4257,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) +@pytest.mark.parametrize("dest_type", ["PDS", "PDSE"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4262,7 +4268,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=25, ) @@ -4294,10 +4300,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", backup=None), - dict(type="pds", backup="USER.TEST.PDS.BACKUP"), - dict(type="pdse", backup=None), - dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), + dict(type="PDS", backup=None), 
+ dict(type="PDS", backup="USER.TEST.PDS.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4349,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4365,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "seq": - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "SEQ": + hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4637,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py 
validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # 
Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ed26cf81b29f896b477156252a23f9ba9fb645d2 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 2 Apr 2024 09:04:12 -0700 Subject: [PATCH 334/413] Cherry picking 1.9 into dev (#1346) * [v1.9.0] Collaboration 1246 to add typrun support for zos_job_submit (#1283) * Fixes typo in property Signed-off-by: ddimatos <dimatos@gmail.com> * Initial commit for supporting typrun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update jobs and zos_job_submit to better support jobs in the input queue Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit to remove other typrun scans from regex Signed-off-by: ddimatos <dimatos@gmail.com> * The ret_code msg field should have only had the status in it, not the RC Signed-off-by: ddimatos <dimatos@gmail.com> * Update msg_txt for jobs JCLHOLD, HOLD Signed-off-by: ddimatos <dimatos@gmail.com> * Update test cases with typrun Signed-off-by: ddimatos <dimatos@gmail.com> * Lint updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to control the messages to the ret_code property Signed-off-by: ddimatos <dimatos@gmail.com> * Update wait times as result of the timer fix forced tests to add more time Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos 
<dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/module_utils/job.py plugins/modules/zos_job_submit.py tests/functional/modules/test_zos_job_submit_func.py Changes to be committed: new file: changelogs/fragments/1246-bugfix-zos_job_submit-typrun.yml modified: plugins/module_utils/job.py modified: plugins/modules/zos_job_submit.py modified: tests/functional/modules/test_zos_job_submit_func.py * [v1.9.0] Document the collections SFTP requirement and file tagging. (#1296) * Fixes typo in property Signed-off-by: ddimatos <dimatos@gmail.com> * Initial commit for supporting typrun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update jobs and zos_job_submit to better support jobs in the input queue Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit to remove other typrun scans from regex Signed-off-by: ddimatos <dimatos@gmail.com> * The ret_code msg field should have only had the status in it, not the RC Signed-off-by: ddimatos <dimatos@gmail.com> * Update msg_txt for jobs JCLHOLD, HOLD Signed-off-by: ddimatos <dimatos@gmail.com> * Update test cases with typrun Signed-off-by: ddimatos <dimatos@gmail.com> * Lint updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to control the messages to the ret_code property Signed-off-by: ddimatos <dimatos@gmail.com> * Update wait times as result of the timer fix forced tests to add more time Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc for zos_archive to reference src over path Signed-off-by: ddimatos <dimatos@gmail.com> * Update docs to reference the SFTP requirement Signed-off-by: ddimatos <dimatos@gmail.com> * Update plugin doc Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 
zos_copy to explain that file tagging (chtag) is performed on updated USS files Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * update galaxy.yml Signed-off-by: ddimatos <dimatos@gmail.com> * Update meta/runtime.yml with the version 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> * Update meta collection with latest versions Signed-off-by: ddimatos <dimatos@gmail.com> * Update README Signed-off-by: ddimatos <dimatos@gmail.com> * Update lint and galaxy to reflect 2.14 Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog summary Signed-off-by: ddimatos <dimatos@gmail.com> * update versions for zoau version checker Signed-off-by: ddimatos <dimatos@gmail.com> * Fix array syntax Signed-off-by: ddimatos <dimatos@gmail.com> * Documentation required for wtor filter Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog meta Signed-off-by: ddimatos <dimatos@gmail.com> * update filters general doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update submit modules doc Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/modules/zos_job_submit.py Changes to be committed: modified: plugins/modules/zos_job_submit.py * Update the rst for submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes rst Signed-off-by: ddimatos <dimatos@gmail.com> * Correct lint warning Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy module doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update RST for zos_copy Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright year Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/modules/zos_apf.py Changes to be committed: 
modified: docs/source/release_notes.rst modified: tests/functional/modules/test_zos_job_query_func.py * Delete changelog fragments after generating CHANGELOG Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml changelogs/fragments/1261-job-submit-non-utf8-chars.yml changelogs/fragments/1292-doc-zos_tso_command-example.yml changelogs/fragments/1295-doc-zos_ping-scp.yml Changes to be committed: deleted: changelogs/fragments/1246-bugfix-zos_job_submit-typrun.yml deleted: changelogs/fragments/1296-doc-sftp-collection-requirements.yml deleted: changelogs/fragments/v1.9.0_summary.yml * Update source comment to align to code change Signed-off-by: ddimatos <dimatos@gmail.com> * Update source documentation after pull request review Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/modules/zos_job_submit.py Changes to be committed: modified: docs/source/release_notes.rst modified: plugins/filter/wtor.py modified: plugins/module_utils/job.py modified: plugins/modules/zos_copy.py modified: plugins/modules/zos_job_submit.py modified: plugins/modules/zos_ping.py modified: plugins/modules/zos_tso_command.py modified: tests/functional/modules/test_zos_job_query_func.py modified: tests/functional/modules/test_zos_job_submit_func.py * Typo correction Signed-off-by: ddimatos <dimatos@gmail.com> * Update Galaxy Signed-off-by: ddimatos <dimatos@gmail.com> * Update RST Signed-off-by: ddimatos <dimatos@gmail.com> * Changes to submit module after forward porting typrun support Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * Update test cases to use upper case data set types due to choice requirements Signed-off-by: ddimatos <dimatos@gmail.com> * Updated test expected text Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo and added test cleanup --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores 
<fernandofloresdev@gmail.com> --- .ansible-lint | 1 + CHANGELOG.rst | 22 +- README.md | 11 +- changelogs/.plugin-cache.yaml | 8 +- changelogs/changelog.yaml | 70 +++++ docs/source/filters.rst | 10 +- docs/source/modules/zos_archive.rst | 10 +- docs/source/modules/zos_backup_restore.rst | 9 + docs/source/modules/zos_copy.rst | 6 +- docs/source/modules/zos_data_set.rst | 28 +- docs/source/modules/zos_fetch.rst | 2 +- docs/source/modules/zos_job_submit.rst | 53 ++-- docs/source/modules/zos_script.rst | 4 +- docs/source/modules/zos_tso_command.rst | 2 +- docs/source/modules/zos_unarchive.rst | 18 +- docs/source/plugins.rst | 37 +-- docs/source/release_notes.rst | 93 +++++-- galaxy.yml | 3 +- meta/ibm_zos_core_meta.yml | 6 +- meta/runtime.yml | 2 +- plugins/filter/wtor.py | 55 ++++ plugins/module_utils/job.py | 137 ++++++---- plugins/modules/zos_copy.py | 23 +- plugins/modules/zos_fetch.py | 9 +- plugins/modules/zos_job_submit.py | 253 ++++++++++++------ plugins/modules/zos_ping.py | 2 +- plugins/modules/zos_ping.rexx | 2 +- plugins/modules/zos_script.py | 11 +- plugins/modules/zos_tso_command.py | 2 +- plugins/modules/zos_unarchive.py | 13 +- .../modules/test_zos_job_query_func.py | 6 +- .../modules/test_zos_job_submit_func.py | 232 +++++++++++++--- tests/unit/test_zoau_version_checker_unit.py | 16 +- 33 files changed, 842 insertions(+), 314 deletions(-) diff --git a/.ansible-lint b/.ansible-lint index 821806e3a..9d40faf3b 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -35,6 +35,7 @@ exclude_paths: - tests/sanity/ignore-2.11.txt - tests/sanity/ignore-2.12.txt - tests/sanity/ignore-2.13.txt + - tests/sanity/ignore-2.14.txt - venv* parseable: true quiet: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 505a98474..d2f69d546 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,18 +5,23 @@ ibm.ibm_zos_core Release Notes .. 
contents:: Topics -v1.9.0-beta.1 -============= +v1.9.0 +====== Release Summary --------------- -Release Date: '2024-01-31' +Release Date: '2024-03-11' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ +Major Changes +------------- + +- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + Minor Changes ------------- @@ -32,11 +37,22 @@ Minor Changes Bugfixes -------- +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). +- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). - zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). - zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. 
Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). +- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. 
Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 second longer than reported in the duration. Now the when duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. 
Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). diff --git a/README.md b/README.md index da3b114d4..b2345c118 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against **Ansible Core** versions >=2.14. +This collection has been tested against **Ansible Core** versions >=2.15. The Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** @@ -64,11 +64,12 @@ for more more information on supported versions of Ansible. Other Dependencies ================== -This release of the **IBM z/OS core collection** requires the z/OS managed node have: -- [z/OS](https://www.ibm.com/docs/en/zos) V2R4 or later. +This release of the **IBM z/OS core collection** requires the z/OS managed node have the following: +- [z/OS](https://www.ibm.com/docs/en/zos) - [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). 
-- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. -- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) 1.2.5 (or later) but prior to version 1.3. +- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) +- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) +For specific dependency versions, please review the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) for the version of the IBM Ansible z/OS core installed. Copyright ========= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 899014cd9..4e2979ebb 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -6,7 +6,11 @@ plugins: callback: {} cliconf: {} connection: {} - filter: {} + filter: + filter_wtor_messages: + description: Filter a list of WTOR messages + name: filter_wtor_messages + version_added: 1.2.0 httpapi: {} inventory: {} lookup: {} @@ -131,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.9.0-beta.1 +version: 1.9.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index c05af6436..a8404bf84 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1176,6 +1176,76 @@ releases: name: zos_script namespace: '' release_date: '2023-10-24' + 1.9.0: + changes: + bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). + - zos_apf - When operation=list was selected and more than one data set entry + was fetched, the module only returned one data set. Fix now returns the complete + list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). 
+ - zos_data_set - Fixes a small parsing bug in module_utils/data_set function + which extracts volume serial(s) from a LISTCAT command output. Previously + a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). + - zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained + JCLERROR followed by an integer where the integer appeared to be a reason + code when actually it is a multi line marker used to coordinate errors spanning + more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned + for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when a response was returned, it contained an undocumented + property; ret_code[msg_text]. Now when a response is returned, it correctly + returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=copy was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=hold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). 
+ - zos_job_submit - when typrun=jchhold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=scan was used in JCL, it would fail the module. + Now typrun=scan no longer fails the module and an appropriate message is returned + with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when wait_time_s was used, the duration would run approximately + 5 second longer than reported in the duration. Now the when duration is returned, + it is the actual accounting from when the job is submitted to when the module + reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + major_changes: + - zos_job_submit - when job statuses were read, were limited to AC (active), + CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC + (security error), JCLERROR (job had a jcl error). Now the additional statuses + are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter + error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + release_summary: 'Release Date: ''2024-03-11'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1120-bugfix-zos_job_submit-default_value.yml + - 1236-bugfix-zos_apf-return-list.yml + - 1246-bugfix-zos_job_submit-typrun.yml + - 1247-volser-parsing-leading-dash-bugfix.yml + - 1288-job-submit-non-utf8-chars.yml + - 1292-doc-zos_tso_command-example.yml + - 1294-doc-zos_ping-scp.yml + - 1296-doc-sftp-collection-requirements.yml + - v1.9.0_summary.yml + release_date: '2024-03-16' 1.9.0-beta.1: changes: bugfixes: diff --git a/docs/source/filters.rst b/docs/source/filters.rst index 51e3a034f..bbf24c6d4 100644 --- a/docs/source/filters.rst +++ b/docs/source/filters.rst @@ -5,13 +5,9 @@ Filters ======= -Filters in Ansible are from Jinja2, and are used to transform data inside -a template expression. The templates operate on the Ansible controller, and not -on the target host. Therefore, filters execute on the controller as they augment -the data locally. - -Jinja2 ships with many filters as does Ansible, and also allows users to add -their own custom filters. +Filters are used to transform data inside a template expression. The templates +operate on the Ansible controller, not on the managed node. Therefore, +filters execute on the controller as they augment the data locally. The **IBM z/OS core collection** includes filters and their usage in sample playbooks. 
Unlike collections that can be identified at the top level using the diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 525c7c0be..fe93474f0 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -342,7 +342,7 @@ Examples # Simple archive - name: Archive file into a tar zos_archive: - path: /tmp/archive/foo.txt + src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: name: tar @@ -350,7 +350,7 @@ Examples # Archive multiple files - name: Compress list of files into a zip zos_archive: - path: + src: - /tmp/archive/foo.txt - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip @@ -360,7 +360,7 @@ Examples # Archive one data set into terse - name: Compress data set into a terse zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -368,7 +368,7 @@ Examples # Use terse with different options - name: Compress data set into a terse, specify pack algorithm and use adrdssu zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -379,7 +379,7 @@ Examples # Use a pattern to store - name: Compress data set pattern using xmit zos_archive: - path: "USER.ARCHIVE.*" + src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index cc6c60d66..d70efc7a1 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -200,6 +200,15 @@ hlq | **type**: str +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup data sets. + + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. 
+ + | **required**: False + | **type**: str + + Examples diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 86a3a9463..00e274b00 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -91,6 +91,8 @@ dest If ``dest`` is a nonexistent USS file, it will be created. + If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the copy will fail. + If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. @@ -787,9 +789,9 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. 
In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option executable that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will be responded with a (FSUM8976,./zos_copy.html) error. + Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 70e798a08..0ea34875f 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -97,7 +97,7 @@ type ``MEMBER`` expects to be used with an existing partitioned data set. - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str @@ -139,7 +139,7 @@ space_type record_format The format of the data set. (e.g ``FB``) - Choices are case-insensitive. + Choices are case-sensitive. When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. @@ -370,7 +370,7 @@ batch ``MEMBER`` expects to be used with an existing partitioned data set. - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str @@ -412,7 +412,7 @@ batch record_format The format of the data set. 
(e.g ``FB``) - Choices are case-insensitive. + Choices are case-sensitive. When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. @@ -568,7 +568,7 @@ Examples - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: seq + type: SEQ state: present - name: Create a PDS data set if it does not exist @@ -577,26 +577,26 @@ Examples type: pds space_primary: 5 space_type: M - record_format: fba + record_format: FBA record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 volumes: "222222" replace: yes @@ -604,19 +604,19 @@ Examples - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: esds + type: ESDS - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: ksds + type: KSDS key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: rrds + type: RRDS sms_storage_class: mydata - name: Delete a data set if it exists @@ -661,7 +661,7 @@ Examples type: PDS space_primary: 5 space_type: M - record_format: fb + record_format: FB replace: yes - name: someds.name.here1(member1) type: MEMBER diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 21b573a2a..87a50a65a 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -204,7 +204,7 @@ Notes For 
supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - `zos_fetch <./zos_fetch.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 6cff37a6a..8f4dda61b 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -16,9 +16,8 @@ zos_job_submit -- Submit JCL Synopsis -------- -- Submit JCL from a data set, USS, or from the controller. -- Submit a job and optionally monitor for completion. -- Optionally, wait a designated time until the job finishes. +- Submit JCL in a data set, USS file, or file on the controller. +- Submit a job and monitor for completion. - For an uncataloged dataset, specify the volume serial number. @@ -57,18 +56,6 @@ location | **choices**: DATA_SET, USS, LOCAL -wait - Setting this option will yield no change, it is deprecated. There is no no need to set *wait*; setting *wait_times_s* is the correct way to configure the amount of tme to wait for a job to execute. - - Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. - - See option *wait_time_s*. 
- - | **required**: False - | **type**: bool - | **default**: False - - wait_time_s Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. @@ -333,6 +320,8 @@ Notes .. note:: For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + + @@ -689,24 +678,46 @@ jobs } msg - Return code resulting from the job submission. Jobs that take longer to assign a value can have a value of '?'. + Job status resulting from the job submission. + + Job status `ABEND` indicates the job ended abnormally. + + Job status `AC` indicates the job is active, often a started task or job taking long. + + Job status `CAB` indicates a converter abend. + + Job status `CANCELED` indicates the job was canceled. + + Job status `CNV` indicates a converter error. + + Job status `FLU` indicates the job was flushed. + + Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + + Job status `SEC` or `SEC ERROR` indicates the job has encountered a security error. + + Job status `SYS` indicates a system failure. + + Job status `?` indicates status can not be determined. | **type**: str - | **sample**: CC 0000 + | **sample**: AC msg_code - Return code extracted from the `msg` so that it can be evaluated as a string.
Jobs that take longer to assign a value can have a value of '?'. + The return code from the submitted job as a string. | **type**: str msg_txt - Returns additional information related to the job. Jobs that take longer to assign a value can have a value of '?'. + Returns additional information related to the submitted job. | **type**: str - | **sample**: The job completion code (CC) was not available in the job output, please review the job log." + | **sample**: The job JOB00551 was run with special job processing TYPRUN=SCAN. This will result in no completion, return code or job steps and changed will be false. code - Return code converted to an integer value (when possible). For JCL ERRORs, this will be None. + The return code converted to an integer value when available. + + Jobs which have no return code will return NULL, such is the case of a job that errors or is active. | **type**: int diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index f51096361..31b237588 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -296,9 +296,7 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine. - - `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. 
In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index f3cdb0254..4af6b1b52 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -72,7 +72,7 @@ Examples - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 - - name: Execute TSO command to run explicitly a REXX script from a data set. + - name: Execute TSO command to run a REXX script explicitly from a data set. zos_tso_command: commands: - EXEC HLQ.DATASET.REXX exec diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index da80bd31a..91fa597ee 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -362,14 +362,14 @@ Examples # Simple extract - name: Copy local tar file and unpack it on the managed z/OS node. zos_unarchive: - path: "./files/archive_folder_test.tar" + src: "./files/archive_folder_test.tar" format: name: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: - path: "/tmp/test.bz2" + src: "/tmp/test.bz2" format: name: bz2 include: @@ -378,7 +378,7 @@ Examples # Use exclude - name: Unarchive a terse data set and excluding data sets from unpacking. 
zos_unarchive: - path: "USER.ARCHIVE.RESULT.TRS" + src: "USER.ARCHIVE.RESULT.TRS" format: name: terse exclude: @@ -388,7 +388,7 @@ Examples # List option - name: List content from XMIT zos_unarchive: - path: "USER.ARCHIVE.RESULT.XMIT" + src: "USER.ARCHIVE.RESULT.XMIT" format: name: xmit format_options: @@ -404,6 +404,8 @@ Notes .. note:: VSAMs are not supported. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + See Also @@ -411,7 +413,7 @@ See Also .. seealso:: - - :ref:`zos_unarchive_module` + - :ref:`zos_archive_module` @@ -420,14 +422,14 @@ Return Values ------------- -path - File path or data set name unarchived. +src + File path or data set name unpacked. | **returned**: always | **type**: str dest_path - Destination path where archive was extracted. + Destination path where archive was unpacked. | **returned**: always | **type**: str diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index 5c8605ad3..ef0f6c183 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -5,30 +5,33 @@ Plugins ======= -Plugins that come with the **IBM z/OS core collection** augment Ansible's core +Plugins that come with the **IBM z/OS core collection** complement Ansible's core functionality. Ansible uses a plugin architecture to enable a rich, flexible and expandable feature set. Action ------ -* ``zos_ping``: Manages the REXX source transferred to the z/OS managed node for - `zos_ping`_. -* ``zos_copy``: Used to `copy data`_ from the controller to the z/OS managed - node. 
-* ``zos_fetch``: Used to `fetch data`_ from the z/OS managed node to the - controller. -* ``zos_job_submit``: Used to `submit a job`_ from the controller and optionally - monitor the job completion. +Action plugins integrate local processing and local data with module functionality. +Action plugins are executed by default when an associated module is used; no additional +user action is required, this documentation is reference only. -.. _normal: - https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/action/normal.py -.. _zos_ping: - modules/zos_ping.html -.. _copy data: +* `zos_copy`_: Used to copy data from the controller to the z/OS managed node. +* `zos_fetch`_: Used to fetch data from the z/OS managed node to the controller. +* `zos_job_submit`_: Used to submit a job from the controller to the z/OS managed node. +* `zos_ping`_: Used to transfer the module's REXX source to the z/OS managed node. +* `zos_script`_: Used to transfer scripts from the controller to the z/OS managed node. +* `zos_unarchive`_: Used to transfer archives from the controller to the z/OS managed node. + +.. _zos_copy: modules/zos_copy.html -.. _fetch data: +.. _zos_fetch: modules/zos_fetch.html -.. _submit a job: +.. _zos_job_submit: modules/zos_job_submit.html - +.. _zos_ping: + modules/zos_ping.html +.. _zos_script: + modules/zos_script.html +.. _zos_unarchive: + modules/zos_unarchive.html diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 726c1b64c..7c2c3a929 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,13 +1,22 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021, 2023 . +.. © Copyright IBM Corporation 2020, 2024 . .. ...........................................................................
======== Releases ======== -Version 1.9.0-beta.1 -==================== +Version 1.9.0 +============= + +Major Changes +------------- + - IBM Ansible z/OS core collection (**ibm_zos_core**) version 1.9.0 will be the last release to support ZOAU 1.2.x. + + - IBM Ansible z/OS core version 1.9.0 will continue to receive security updates and bug fixes. + + - Starting with IBM Ansible z/OS core version 1.10.0, ZOAU version 1.3.0 will be required. + - IBM Open Enterprise SDK for Python version 3.9.x is no longer supported. Minor Changes ------------- @@ -21,7 +30,24 @@ Minor Changes - Improved messages in the action plugin. - Improved the action plugin performance, flow and use of undocumented variables. - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. -- ``zos_tso_command`` - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. + - Improved job status support, now the supported statuses for property **ret_code[msg]** are: + + - Job status **ABEND** indicates the job ended abnormally. + - Job status **AC** indicates the job is active, often a started task or job taking long. + - Job status **CAB** indicates a converter abend. + - Job status **CANCELED** indicates the job was canceled. + - Job status **CNV** indicates a converter error. + - Job status **FLU** indicates the job was flushed. + - Job status **JCLERR** or **JCL ERROR** indicates the JCL has an error. + - Job status **SEC** or **SEC ERROR** indicates the job has encountered a security error. + - Job status **SYS** indicates a system failure. + - Job status **?** indicates status can not be determined. + +- ``zos_tso_command`` + + - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. + - Has been updated with a new example demonstrating how to chain multiple TSO commands into one invocation using semicolons.
+ - ``zos_mvs_raw`` - Has been enhanced to ensure that **instream-data** for option **dd_input** contain blanks in columns 1 and 2 while retaining a maximum length @@ -33,40 +59,69 @@ Minor Changes Bugfixes -------- +- ``zos_apf`` - Fixed an issue that when **operation=list** was selected and more than one data set entry was fetched, only one + data set was returned, now the complete list is returned. + - ``zos_copy`` - - Fixed an issue when copying an aliased executable from a data set to a non-existent data set, the destination data sets primary - and secondary extents would not match the source data set extent sizes. + - Fixed an issue that when copying an aliased executable from a data set to a non-existent data set, the destination + datasets primary and secondary extents would not match the source data set extent sizes. - Fixed an issue when performing a copy operation to an existing file, the copied file resulted in having corrupted contents. -- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix +- ``zos_job_submit`` + + - Fixed an issue that when no **location** is set, the default is not correctly configured to **location=DATA_SET**. + - Fixed an issue that when a JCL error is encountered, the **ret_code[msg_code]** no longer will contain the multi line marker used to coordinate errors. + - Fixed an issue that when a response was returned, the property **ret_code[msg_text]** was incorrectly returned over **ret_code[msg_txt]**. + - Fixed an issue that when JCL contained **TYPRUN=SCAN**, the module would fail. The module no longer fails and an appropriate message and response is returned. + - Fixed an issue that when JCL contained either **TYPRUN=COPY**, **TYPRUN=HOLD**, or **TYPRUN=JCLHOLD** an improper message was returned and the job submission failed. + Now the job will fail under the condition that the module has exceeded its wait time and return a proper message. 
+ - Fixed an issue where when option **wait_time_s** was used, the duration would be approximately 5 seconds longer than what was reported in the duration. + Now the duration is from when the job is submitted to when the module reads the job output. + +- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters, would result in a traceback. The fix supports shorter job IDs as well as the use of wildcards. -- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix +- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters, would result in a traceback. The fix supports shorter job IDs as well as the use of wildcards. - ``zos_unarchive`` - - Fixed an issue when using a local file with the USS format option that would fail sending it to the managed node. - - Fixed an issue that occurred when unarchiving USS files that would leave temporary files behind on the managed node. + - Fixed an issue that when using a local file with the USS format option, the module would fail to send the archive to the managed node. + - Fixed an issue that occurred when unarchiving USS files, the module would leave temporary files behind on the managed node. + +- ``module_utils`` + + - ``job.py`` - Improved exception handling and added a message inside the **content** of the **ddname** when a non-printable + character (character that can not be converted to UTF-8) is encountered. + - ``data_set.py`` - Fixed an issue that when a volser name less than 6 characters was encountered, the volser name was padded with hyphens to have length 6. + Known Issues ------------ Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. -This occurs when a module receives content that does not correspond to a UTF-8 value. 
These include modules ``zos_job_submit``, ``zos_job_output``, -``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is -unique, some options to work around the error are below. +- This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, + ``zos_operator_action_query``` but are not limited to this list. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6**. +- If the appropriate level of ZOAU can not be installed, some options are to: -- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. -- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. -- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with - a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. 
+ +An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended +and documented **space_primary** option. + +In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, +this is so that the collection can continue to maintain certified status. Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ @@ -75,7 +130,7 @@ Reference * Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ * Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. Version 1.8.0 @@ -978,6 +1033,8 @@ Known issues https://www.ibm.com/docs/en/python-zos/3.10 .. _3.11: https://www.ibm.com/docs/en/python-zos/3.11 +.. _3.12: + https://www.ibm.com/docs/en/python-zos/3.12 .. _Z Open Automation Utilities 1.1.0: https://www.ibm.com/docs/en/zoau/1.1.x .. 
_Z Open Automation Utilities 1.1.1: diff --git a/galaxy.yml b/galaxy.yml index 93af5d038..c408424aa 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.9.0-beta.1 +version: 1.10.0-beta.1 # Collection README file readme: README.md @@ -96,4 +96,5 @@ build_ignore: - tests/sanity/ignore-2.11.txt - tests/sanity/ignore-2.12.txt - tests/sanity/ignore-2.13.txt + - tests/sanity/ignore-2.14.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index abab47f9c..7e24bc280 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,10 +1,10 @@ name: ibm_zos_core -version: "1.9.0-beta.1" +version: "1.10.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" - version: ">=3.9" + version: ">=3.10" - name: "Z Open Automation Utilities" version: - - "1.2.5" + - "1.3.0" diff --git a/meta/runtime.yml b/meta/runtime.yml index be99ccf4b..898ad8ff5 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.14.0' +requires_ansible: '>=2.15.0' diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py index 28e908376..17b530218 100644 --- a/plugins/filter/wtor.py +++ b/plugins/filter/wtor.py @@ -12,6 +12,61 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type + +DOCUMENTATION = r""" +name: filter_wtor_messages +author: Demetrios Dimatos (@ddimatos) +version_added: "1.2.0" +short_description: Filter a list of WTOR messages +description: + - Filter a list of WTOR (write to operator with reply) messages found by + module zos_operator_action_query. + - Filter using a string or regular expression. +options: + wtor_response: + description: + - A list containing response property `message_text`, provided the + module zos_operator_action_query. + - The list can be the outstanding messages found in the modules + response under the `actions` property or the entire module + response. 
+ type: list + required: true + text: + description: + - String of text to match or a regular expression to use as filter criteria. + type: str + required: true + ignore_case: + description: + - Should the filter enable case sensitivity when performing a match. + type: bool + required: false + default: false +""" + +EXAMPLES = r""" +- name: Filter actionable messages that match 'IEE094D SPECIFY OPERAND' and if so, set is_specify_operand = true. + set_fact: + is_specify_operand: "{{ result | ibm.ibm_zos_core.filter_wtor_messages('IEE094D SPECIFY OPERAND') }}" + when: result is defined and not result.failed + +- name: Evaluate if there are any existing dump messages matching 'IEE094D SPECIFY OPERAND' + assert: + that: + - is_specify_operand is defined + - bool_zos_operator_action_continue + success_msg: "Found 'IEE094D SPECIFY OPERAND' message." + fail_msg: "Did not find 'IEE094D SPECIFY OPERAND' message." +""" + +RETURN = r""" + _value: + description: A list containing dictionaries matching the WTOR. + type: list + elements: dict +""" + import re diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1f49a2b26..25483b45d 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -21,7 +21,7 @@ # Only importing this module so we can catch a JSONDecodeError that sometimes happens # when a job's output has non-printable chars that conflict with JSON's control # chars.
-from json import decoder +from json import JSONDecodeError from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) @@ -29,6 +29,12 @@ ZOAUImportError ) +try: + from zoautil_py import exceptions +except ImportError: + exceptions = ZOAUImportError(traceback.format_exc()) + + try: # For files that import individual functions from a ZOAU module, # we'll replace the imports to instead get the module. @@ -40,6 +46,18 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) +JOB_ERROR_STATUSES = frozenset(["ABEND", # ZOAU job ended abnormally + "SEC ERROR", # Security error (legacy Ansible code) + "SEC", # ZOAU security error + "JCL ERROR", # Job had a JCL error (legacy Ansible code) + "JCLERR", # ZOAU job had a JCL error + "CANCELED", # ZOAU job was cancelled + "CAB", # ZOAU converter abend + "CNV", # ZOAU converter error + "SYS", # ZOAU system failure + "FLU" # ZOAU job was flushed + ]) + def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -89,11 +107,6 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru start_time=start_time ) - # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): - # current_time = timer() - # duration = round(current_time - start_time) - # sleep(1) - if len(job_detail) == 0: # some systems have issues with "*" while some require it to see results job_id = "" if job_id == "*" else job_id @@ -238,17 +251,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # Preserve the original job_id for the failure path job_id_temp = job_id - # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] - # e.g.: OMVSADM HELLO JOB00126 JCLERR ? 
- # jobs.listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not - # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers - # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] - # creationdatetime=job[9] queueposition=job[10] - # starting in zoau 1.2.4, program_name[11] was added. In 1.3.0, include_extended - # has to be set to true so we get the program name for a job. - # Testing has shown that the program_name impact is minor, so we're removing that option - final_entries = [] + + # In 1.3.0, include_extended has to be set to true so we get the program name for a job. entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) while ((entries is None or len(entries) == 0) and duration <= timeout): @@ -276,25 +281,17 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["system"] = "" job["owner"] = entry.owner - job["ret_code"] = dict() - # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'. - # This new way of constructing msg allows for a better empty message. - # "" instead of "None None". - job["ret_code"]["msg"] = "{0} {1}".format( - entry.status if entry.status else "", - entry.return_code if entry.return_code else "" - ).strip() - + job["ret_code"] = {} + job["ret_code"]["msg"] = entry.status job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None if entry.return_code and len(entry.return_code) > 0: if entry.return_code.isdigit(): job["ret_code"]["code"] = int(entry.return_code) - job["ret_code"]["msg_text"] = entry.status if entry.status else "?" + job["ret_code"]["msg_txt"] = entry.status - # Beginning in ZOAU v1.3.0, the Job class changes svc_class to - # service_class. + # Beginning in ZOAU v1.3.0, the Job class changes svc_class to service_class. 
job["svc_class"] = entry.service_class job["job_class"] = entry.job_class job["priority"] = entry.priority @@ -310,16 +307,45 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = jobs.list_dds(entry.job_id) - while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): + # If true, it means the job is not ready for DD queries and the duration and + # timeout should apply here instructing the user to add more time + is_dd_query_exception = False + is_jesjcl = False + list_of_dds = [] + + try: + list_of_dds = jobs.list_dds(entry.job_id) + except exceptions.DDQueryException as err: + if 'BGYSC5201E' in str(err): + is_dd_query_exception = True + pass + + # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s. + # Idea here is to force a TYPRUN{HOLD|JCLHOLD|COPY} job to go the full wait duration since we have + # currently no way to detect them, but if we know the job is one of the JOB_ERROR_STATUS lets + # exit the wait time supplied as we know it is a job failure. 
+ is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False + is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False + + while ((list_of_dds is None or len(list_of_dds) == 0 or is_dd_query_exception) and + (not is_jesjcl and not is_job_error_status and duration <= timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = jobs.list_dds(entry.job_id) + try: + # Note, in the event of an exception, eg job has TYPRUN=HOLD + # list_of_dds will still be populated with valuable content + list_of_dds = jobs.list_dds(entry.job_id) + is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False + is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False + except exceptions.DDQueryException as err: + if 'BGYSC5201E' in str(err): + is_dd_query_exception = True + continue job["duration"] = duration - for single_dd in list_of_dds: + dd = {} if "dd_name" not in single_dd: @@ -360,23 +386,24 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "step_name" in single_dd: if "dd_name" in single_dd: - # In case ZOAU fails when reading the job output, we'll - # add a message to the user telling them of this. - # ZOAU cannot read partial output from a job, so we - # have to make do with nothing from this step if it fails. + # In case ZOAU fails when reading the job output, we'll add a + # message to the user telling them of this. ZOAU cannot read + # partial output from a job, so we have to make do with nothing + # from this step if it fails. try: tmpcont = jobs.read_output( entry.job_id, single_dd["step_name"], single_dd["dd_name"] ) - except (UnicodeDecodeError, decoder.JSONDecodeError): + except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) as e: tmpcont = ( "Non-printable UTF-8 characters were present in this output. " - "Please access it manually." + "Please access it from the job log." 
) dd["content"] = tmpcont.split("\n") + job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) job["ddnames"].append(dd) @@ -397,16 +424,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["subsystem"] = (tmptext.split("\n")[ 0]).replace(" ", "") - # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " - # then further reduce down to: 'JCL ERROR 029' - if job["ret_code"]["msg_code"] == "?": - if "JOB NOT RUN -" in tmpcont: - tmptext = tmpcont.split( - "JOB NOT RUN -")[1].split("\n")[0] - job["ret_code"]["msg"] = tmptext.strip() - job["ret_code"]["msg_code"] = None - job["ret_code"]["code"] = None - final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") @@ -439,3 +456,25 @@ def _ddname_pattern(contents, resolve_dependencies): ) ) return str(contents) + + +def search_dictionaries(key, value, list_of_dictionaries): + """ Searches a list of dictionaries given key and returns + the value dictionary. + + Arguments: + key {str} -- dictionary key to search for. + value {str} -- value to match for the dictionary key + list {str} -- list of dictionaries + + Returns: + dictionary -- dictionary matching the key and value + + Raises: + TypeError -- When input is not a list of dictionaries + """ + if not isinstance(list_of_dictionaries, list): + raise TypeError( + "Unsupported type for 'list_of_dictionaries', must be a list of dictionaries") + + return [element for element in list_of_dictionaries if element[key] == value] diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6991c4d81..9acb3c1c6 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -94,6 +94,10 @@ - C(dest) can be a USS file, directory or MVS data set name. - If C(dest) has missing parent directories, they will be created. - If C(dest) is a nonexistent USS file, it will be created. 
+ - If C(dest) is a new USS file or replacement, the file will be appropriately tagged with + either the system's default locale or the encoding option defined. If the USS file is + a replacement, the user must have write authority to the file either through ownership, + group or other permissions, else the module will fail. - If C(dest) is a nonexistent data set, it will be created following the process outlined here and in the C(volume) option. - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of @@ -467,15 +471,16 @@ - VSAM data sets can only be copied to other VSAM data sets. - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) for the underlying - transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, - you can exempt the Ansible userid on z/OS from using Co:Z thus falling back - to using standard SFTP. - - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member - into a PDSE that contains program objects. You can control this behavior using module option - executable that will signify an executable is being copied into a PDSE with other - executables. Mixing data type members with program objects will be responded with a - (FSUM8976,./zos_copy.html) error. + - This module uses SFTP (Secure File Transfer Protocol) for the underlying + transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the + case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling + back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for + transfers, if not available, the module will fail. 
+ - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of + a data type member into a PDSE that contains program objects. You can control this + behavior using module option C(executable) that will signify an executable is being + copied into a PDSE with other executables. Mixing data type members with program + objects will result in a (FSUM8976,./zos_copy.html) error. seealso: - module: zos_fetch - module: zos_data_set diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index dc4bc8071..cc26b622b 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -146,10 +146,11 @@ - Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - L(zos_fetch,./zos_fetch.html) uses SFTP (Secure File Transfer Protocol) for the underlying - transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, - you can exempt the Ansible userid on z/OS from using Co:Z thus falling back - to using standard SFTP. + - This module uses SFTP (Secure File Transfer Protocol) for the underlying + transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the + case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling + back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for + transfers, if not available, the module will fail. 
seealso: - module: zos_data_set - module: zos_copy diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1fd5030b5..7c66c2543 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -25,9 +25,8 @@ - "Demetrios Dimatos (@ddimatos)" short_description: Submit JCL description: - - Submit JCL from a data set, USS, or from the controller. - - Submit a job and optionally monitor for completion. - - Optionally, wait a designated time until the job finishes. + - Submit JCL in a data set, USS file, or file on the controller. + - Submit a job and monitor for completion. - For an uncataloged dataset, specify the volume serial number. version_added: "1.0.0" options: @@ -126,6 +125,13 @@ notes: - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. """ RETURN = r""" @@ -217,28 +223,40 @@ contains: msg: description: - Return code resulting from the job submission. Jobs that take - longer to assign a value can have a value of '?'. + - Job status resulting from the job submission. 
+ - Job status `ABEND` indicates the job ended abnormally. + - Job status `AC` indicates the job is active, often a started task or job taking long. + - Job status `CAB` indicates a converter abend. + - Job status `CANCELED` indicates the job was canceled. + - Job status `CNV` indicates a converter error. + - Job status `FLU` indicates the job was flushed. + - Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + - Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + - Job status `SYS` indicates a system failure. + - Job status `?` indicates status can not be determined. + - Jobs where status can not be determined will result in None (NULL). type: str - sample: CC 0000 + sample: AC msg_code: description: - Return code extracted from the `msg` so that it can be evaluated - as a string. Jobs that take longer to assign a value can have a - value of '?'. + - The return code from the submitted job as a string. + - Jobs which have no return code will result in None (NULL), such + is the case of a job that errors or is active. type: str sample: 0000 msg_txt: description: - Returns additional information related to the job. Jobs that take - longer to assign a value can have a value of '?'. + - Returns additional information related to the submitted job. + - Jobs which have no additional information will result in None (NULL). type: str - sample: The job completion code (CC) was not available in the job - output, please review the job log." + sample: The job JOB00551 was run with special job processing TYPRUN=SCAN. + This will result in no completion, return code or job steps and + changed will be false. code: description: - Return code converted to an integer value (when possible). - For JCL ERRORs, this will be None. + - The return code converted to an integer value when available. + - Jobs which have no return code will result in None (NULL), such + is the case of a job that errors or is active. 
type: int sample: 0 steps: @@ -537,15 +555,10 @@ "system": "STL1" } ] -message: - description: This option is being deprecated - returned: success - type: str - sample: Submit JCL operation succeeded. """ EXAMPLES = r""" -- name: Submit JCL in a PDSE member +- name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) location: DATA_SET @@ -597,7 +610,7 @@ BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( - job_output, + job_output, search_dictionaries, JOB_ERROR_STATUSES ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -627,8 +640,10 @@ jobs = ZOAUImportError(traceback.format_exc()) -JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) -JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) +JOB_STATUSES = list(dict.fromkeys(JOB_ERROR_STATUSES)) +JOB_STATUSES.append("CC") + +JOB_SPECIAL_PROCESSING = frozenset(["TYPRUN"]) MAX_WAIT_TIME_S = 86400 @@ -693,23 +708,39 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N # which is what ZOAU sends back, opitonally we can check the 'status' as # that is sent back as `AC` when the job is not complete but the problem # with monitoring 'AC' is that STARTED tasks never exit the AC status. 
+ job_fetched = None + job_fetch_rc = None + job_fetch_status = None + if job_submitted: - job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code - job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status + try: + job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] + job_fetch_rc = job_fetched.return_code + job_fetch_status = job_fetched.status + except zoau_exceptions.JobFetchException: + pass # Before moving forward lets ensure our job has completed but if we see - # status that matches one in JOB_ERROR_MESSAGES, don't wait, let the code - # drop through and get analyzed in the main as it will scan the job ouput. - # Any match to JOB_ERROR_MESSAGES ends our processing and wait times. - while (job_fetch_status not in JOB_ERROR_MESSAGES and + # status that matches one in JOB_STATUSES, don't wait, let the code + # drop through and get analyzed in the main as it will scan the job ouput + # Any match to JOB_STATUSES ends our processing and wait times + while (job_fetch_status not in JOB_STATUSES and job_fetch_status == 'AC' and ((job_fetch_rc is None or len(job_fetch_rc) == 0 or job_fetch_rc == '?') and duration < timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code - job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status + try: + job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] + job_fetch_rc = job_fetched.return_code + job_fetch_status = job_fetched.status + # Allow for jobs that need more time to be fectched to run the wait_time_s + except zoau_exceptions.JobFetchException as err: + if duration >= timeout: + raise err + else: + continue # ZOAU throws a JobSubmitException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed @@ -736,11 +767,12 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N 
result["stderr"] = to_text(err) result["duration"] = duration result["job_id"] = job_submitted.job_id + _msg_detail = "the job with status {0}".format(job_fetch_status) if job_fetch_status else "its status" result["msg"] = ("The JCL has been submitted {0} with ID {1} but there was an " - "error while fetching its status within the allocated time of {2} " + "error while fetching {2} within the allocated time of {3} " "seconds. Consider using module zos_job_query to poll for the " "job for more information. Standard error may have additional " - "information.".format(src_name, job_submitted.job_id, str(timeout))) + "information.".format(src_name, job_submitted.job_id, _msg_detail, str(timeout))) module.fail_json(**result) # Between getting a job_submitted and the jobs.fetch_multiple(job_submitted.job_id)[0].return_code @@ -882,7 +914,7 @@ def run_module(): if wait_time_s <= 0 or wait_time_s > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option `wait_time_s` is not valid, it must " + result["msg"] = ("The value for option 'wait_time_s' is not valid, it must " "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) @@ -899,29 +931,39 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) - try: - # Explictly pass None for the unused args else a default of '*' will be - # used and return undersirable results - job_output_txt = None + # Explictly pass None for the unused args else a default of '*' will be + # used and return undersirable results + job_output_txt = None + try: job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, dd_scan=return_output, duration=duration, timeout=wait_time_s, start_time=start_time) + # This is resolvig a bug where the duration coming from job_output is passed by value, duration + # being an immutable type can not be changed and must be returned or accessed 
from the job.py. + if job_output is not None: + duration = job_output_txt[0].get("duration") if not None else duration + result["duration"] = duration if duration >= wait_time_s: result["failed"] = True result["changed"] = False + _msg = ("The JCL submitted with job id {0} but appears to be a long " + "running job that exceeded its maximum wait time of {1} " + "second(s). Consider using module zos_job_query to poll for " + "a long running job or increase option 'wait_times_s' to a value " + "greater than {2}.".format(str(job_submitted_id), str(wait_time_s), str(duration))) + _msg_suffix = ("Consider using module zos_job_query to poll for " + "a long running job or increase option 'wait_times_s' to a value " + "greater than {0}.".format(str(duration))) + if job_output_txt is not None: result["jobs"] = job_output_txt - result["msg"] = ( - "The JCL submitted with job id {0} but appears to be a long " - "running job that exceeded its maximum wait time of {1} " - "second(s). Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_times_s` to a value " - "greater than {2}.".format( - str(job_submitted_id), str(wait_time_s), str(duration))) + job_ret_code = job_output_txt[0].get("ret_code") + job_ret_code.update({"msg_txt": _msg_suffix}) + result["msg"] = _msg module.exit_json(**result) # Job has submitted, the module changed the managed node @@ -932,35 +974,76 @@ def run_module(): job_ret_code = job_output_txt[0].get("ret_code") if job_ret_code: - job_msg = job_ret_code.get("msg") - job_code = job_ret_code.get("code") - - # retcode["msg"] should never be empty where a retcode["code"] can be None, - # "msg" could be an ABEND which has no corresponding "code" - if job_msg is None: - _msg = ("Unable to find a 'msg' in the 'ret_code' dictionary, " - "please review the job log.") - result["stderr"] = _msg - raise Exception(_msg) + job_ret_code_msg = job_ret_code.get("msg") + job_ret_code_code = job_ret_code.get("code") + 
job_ret_code_msg_code = job_ret_code.get("msg_code") if return_output is True and max_rc is not None: - is_changed = assert_valid_return_code(max_rc, job_code, job_ret_code) - - if re.search("^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)), job_msg): - # If the job_msg doesn't have a CC, it is an improper completion (error/abend) - if re.search("^(?:CC)", job_msg) is None: - _msg = ("The job completion code (CC) was not in the job log. " - "Please review the error {0} and the job log.".format(job_msg)) - result["stderr"] = _msg + is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, result) + + if job_ret_code_msg is not None: + if re.search("^(?:{0})".format("|".join(JOB_STATUSES)), job_ret_code_msg): + # If the job_ret_code_msg doesn't have a CC (completion code), the job failed. + if re.search("^(?:CC)", job_ret_code_msg) is None: + _msg = ("The job completion code (CC) was not in the job log. " + "please review the job log for status {0}.".format(job_ret_code_msg)) + result["stderr"] = _msg + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) + + if job_ret_code_code is None: + # If there is no job_ret_code_code (Job return code) it may NOT be an error, + # some jobs will never return have an RC, eg Jobs with TYPRUN=*, + # Started tasks (which are not supported) so further analyze the + # JESJCL DD to figure out if its a TYPRUN job + + job_dd_names = job_output_txt[0].get("ddnames") + jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) + + # Its possible jobs don't have a JESJCL which are active and this would + # cause an index out of range error. + if not jes_jcl_dd: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." 
+ _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) raise Exception(_msg) - if job_code is None: - raise Exception("The job return code was not available in the job log, " - "please review the job log and error {0}.".format(job_msg)) - - if job_code != 0 and max_rc is None: - raise Exception("The job return code {0} was non-zero in the " - "job output, this job has failed.".format(str(job_code))) + jes_jcl_dd_content = jes_jcl_dd[0].get("content") + jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) + + # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. + special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" + .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) + + if special_processing_keyword: + job_ret_code.update({"msg": special_processing_keyword[0]}) + job_ret_code.update({"code": None}) + job_ret_code.update({"msg_code": None}) + job_ret_code.update({"msg_txt": "The job {0} was run with special job " + "processing {1}. This will result in no completion, " + "return code or job steps and changed will be false." + .format(job_submitted_id, special_processing_keyword[0])}) + is_changed = False + else: + # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated + # so check both and provide a proper response. + + if job_ret_code_msg_code is None: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." 
+ _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) + + # raise Exception("The job return code was not available in the job log, " + # "please review the job log and error {0}.".format(job_ret_code_msg)) + elif job_ret_code_code != 0 and max_rc is None: + _msg = ("The job return code {0} was non-zero in the " + "job output, this job has failed.".format(str(job_ret_code_code))) + job_ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) if not return_output: for job in result.get("jobs", []): @@ -975,7 +1058,6 @@ def run_module(): result["stderr"] = _msg result["jobs"] = None raise Exception(_msg) - except Exception as err: result["failed"] = True result["changed"] = False @@ -995,27 +1077,32 @@ def run_module(): module.exit_json(**result) -def assert_valid_return_code(max_rc, job_rc, ret_code): +def assert_valid_return_code(max_rc, job_rc, ret_code, result): if job_rc is None: raise Exception( "The job return code (ret_code[code]) was not available in the jobs output, " "this job has failed.") if job_rc > max_rc: - raise Exception("The job return code, 'ret_code[code]' {0} for the submitted job is " - "greater than the value set for option 'max_rc' {1}. " - "Increase the value for 'max_rc' otherwise this job submission " - "has failed.".format(str(job_rc), str(max_rc))) + _msg = ("The job return code, 'ret_code[code]' {0} for the submitted job is " + "greater than the value set for option 'max_rc' {1}. 
" + "Increase the value for 'max_rc' otherwise this job submission " + "has failed.".format(str(job_rc), str(max_rc))) + ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) for step in ret_code["steps"]: step_cc_rc = int(step["step_cc"]) step_name_for_rc = step["step_name"] if step_cc_rc > max_rc: - raise Exception("The step name {0} with return code {1} for the submitted job is " - "greater than the value set for option 'max_rc' {2}. " - "Increase the value for 'max_rc' otherwise this job submission " - "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc))) - + _msg = ("The step name {0} with return code {1} for the submitted job is " + "greater than the value set for option 'max_rc' {2}. " + "Increase the value for 'max_rc' otherwise this job submission " + "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc))) + ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) # If there is NO exception rasied it means that max_rc is larger than the # actual RC from the submitted job. In this case, the ansible changed status # should NOT be 'changed=true' even though the user did override the return code, diff --git a/plugins/modules/zos_ping.py b/plugins/modules/zos_ping.py index 6de0cccf0..5f134cd90 100644 --- a/plugins/modules/zos_ping.py +++ b/plugins/modules/zos_ping.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index a881146b0..beca54c3b 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -85,7 +85,7 @@ If (rc <> 0 | returnCode <> HWTJ_OK) Then Do failModule(errmsg, "", retC) End -/* Check for Python version >= 3.8 eg: 'Python 3.10.0' */ +/* Check for Python version >= 3.10 eg: 'Python 3.10.0' */ retC = bpxwunix('python3 --version', out., err.) If (err.0 > 0) Then Do Do index=1 To err.0 diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index b69d70b2d..0677d187d 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -116,11 +116,12 @@ - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to - the remote machine. - - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) - for the underlying transfer protocol; Co:Z SFTP is not supported. In - the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from - using Co:Z thus falling back to using standard SFTP. + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with L(zos_tso_command,./zos_tso_command.html). 
diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 6c2cb6ef6..17e190fb2 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index e9b17766c..aa315b3fb 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -29,8 +29,6 @@ - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. - - options: src: description: @@ -311,12 +309,17 @@ type: bool required: false default: false - notes: - VSAMs are not supported. - + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. 
seealso: - - module: zos_unarchive + - module: zos_archive ''' EXAMPLES = r''' diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index ee7b03157..8f6c6e072 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -57,7 +57,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=JDATA_SET_NAME, state="present", type="pds", replace=True + name=JDATA_SET_NAME, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) @@ -90,7 +90,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=NDATA_SET_NAME, state="present", type="pds", replace=True + name=NDATA_SET_NAME, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 394a087ad..bae4dbb36 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with 
the License. # You may obtain a copy of the License at @@ -163,7 +163,7 @@ //****************************************************************************** //* Job containing a non existent DSN that will force an error. //* Returns: -//* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) +//* ret_code->(code=null, msg=JCLERR, msg_txt=JCLERR, msg_code=None) //* msg --> The JCL submitted with job id JOB00532 but there was an error, //* please review the error for further details: The job completion //* code (CC) was not in the job log. Please review the error @@ -198,7 +198,7 @@ //* Another job containing no job card resulting in a JCLERROR with an value. It //* won't always be 952, it will increment. //* Returns: -//* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) +//* ret_code->(code=null, msg=JCLERR, msg_text=JCLERR, msg_code=null) //* msg --> The JCL submitted with job id JOB00728 but there was an error, //* please review the error for further details: The job completion //* code (CC) was not in the job log. Please review the error @@ -214,11 +214,11 @@ //* Job containing a USER=FOOBAR that will cause JES to return a SEC ERROR which //* is a security error. //* Returns: -//* ret_code->(code=null, msg=SEC ?, msg_text=SEC, msg_code=?) -//* msg --> The JCL submitted with job id JOB00464 but there was an error, +//* ret_code->(code=None, msg=SEC, msg_txt=<msg>, msg_code=?) +//* msg --> The JCL submitted with job id JOB01062 but there was an error, //* please review the error for further details: The job return code -//* was not available in the job log, please review the job log -//* and error SEC ?.", +//* was not available in the job log, please review the job log and +//* status SEC. 
//****************************************************************************** //INVUSER JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,USER=FOOBAR @@ -234,22 +234,102 @@ JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* //****************************************************************************** -//* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and -//* not actually run the JCL. +//* Job containing a TYPRUN=SCAN will cause JES to run a syntax check and +//* not actually run the JCL. The job will be put on the H output queue, DDs +//* JESJCL and JESMSGLG are available. Ansible considers this a passing job. //* Returns: -//* ret_code->(code=null, msg=? ?, msg_text=?, msg_code=?) -//* msg --> The JCL submitted with job id JOB00620 but there was an error, -//* please review the error for further details: The job return code -//* was not available in the job log, please review the job log -//* and error ? ?.", +//* ret_code->(code=null, msg=TYPRUN=SCAN, msg_txt=<msg>, msg_code=null) +//* msg --> The job JOB00551 was run with special job processing TYPRUN=SCAN. +//* This will result in no completion, return code or job steps and +//* changed will be false." //****************************************************************************** -//TYPESCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, -// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN +//SCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY //SYSPRINT DD SYSOUT=* //SYSUT1 DD * -HELLO, WORLD +HELLO, WORLD. 
SCAN OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_COPY = """//* +//****************************************************************************** +//* Job containing a TYPRUN=COPY will cause JES to copy the input job +//* (source content) stream directly to a sysout data set (device specified in +//* the message class parameter (H)) and schedule it for output processing, in +//* other words, the job will be put on the H output queue; DD's +//* JESMSGLG and JESJCLIN are available. Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=<msg>, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//COPY JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=COPY +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. COPY OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_HOLD = """//* +//****************************************************************************** +//* Job containing a TYPRUN=HOLD will cause JES to hold this JCL without +//* executing it until a special event occurs at which time, the operator will +//* release the job from HOLD and allow the job to continue processing. +//* Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. 
+//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=<msg>, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//HOLD JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=HOLD +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. HOLD OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_JCLHOLD = """//* +//****************************************************************************** +//* Job containing a TYPRUN=JCLHOLD will cause JES to will keep the submitted +//* job in the input queue until it's released by an operator or by the default +//* time assigned to the class parameter. As the operator you enter 'A' or 'R' +//* to release it from the queue. +//* Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=<msg>, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//JCLHOLD JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=JCLHOLD +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. 
JCLHOLD OPERATION /* //SYSUT2 DD SYSOUT=* // @@ -342,9 +422,11 @@ def test_job_submit_PDS(ansible_zos_module, location): hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) + hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) + hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) @@ -362,8 +444,8 @@ def test_job_submit_PDS(ansible_zos_module, location): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_special_characters(ansible_zos_module): @@ -374,7 +456,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True + name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( @@ -465,7 +547,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 + name=data_set_name, state="present", type="PDS", replace=True, volumes=volume_1 ) hosts.all.shell( @@ -473,7 +555,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="uncataloged", type="pds" + name=data_set_name, state="uncataloged", type="PDS" ) results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) @@ -498,7 +580,7 @@ def 
test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -531,7 +613,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -564,7 +646,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -734,43 +816,113 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="LOCAL") + import pprint for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False assert re.search(r'completion code', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None -# Should have a JCL ERROR <int> def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." 
assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error SEC', repr(result.get("msg"))) + assert re.search(r'please review the error for further details', repr(result.get("msg"))) + assert re.search(r'please review the job log for status SEC', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + assert re.search(r'please review the job log for status SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) -def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): +def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'run with special job processing TYPRUN=SCAN', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "TYPRUN=SCAN" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) 
+ import pprint + for result in results.contacted.values(): + pprint.pprint(result) + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'please review the job log', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") is None + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error ? ?', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" 
+ assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None # This test case is related to the following GitHub issues: @@ -807,4 +959,4 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") \ No newline at end of file diff --git a/tests/unit/test_zoau_version_checker_unit.py b/tests/unit/test_zoau_version_checker_unit.py index 96031f4a1..15bcce58b 100644 --- a/tests/unit/test_zoau_version_checker_unit.py +++ b/tests/unit/test_zoau_version_checker_unit.py @@ -45,10 +45,24 @@ (['1','2','1'], "2022/08/17 21:25:13 CUT V1.2.1"), (['1','2','1'], "2022/08/25 21:44:21 CUT V1.2.1 31163ab 1856"), (['1','2','1'], "2022/09/07 15:26:50 CUT V1.2.1 d2f6557 1880"), + (['1','2','1','1'], ""), (['1','2','3'], 
"2022/12/03 13:33:22 CUT V1.2.3 6113dc9 2512"), (['1','2','2'], "2022/12/06 20:44:00 CUT V1.2.2 ee30137 2525"), (['1','2','3'], "2023/03/16 18:17:00 CUT V1.2.3 1aa591fb 2148 PH50145"), - (['1', '2', '4', '0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + (['1','2','3','1'], ""), + (['1','2','3','2'], ""), + (['1','2','4','0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + (['1','2','4','1'], ""), + (['1','2','4','2'], ""), + (['1','2','4','3'], ""), + (['1','2','4','4'], ""), + (['1','2','4','5'], ""), + (['1','2','5','0'], ""), + (['1','2','5','1'], ""), + (['1','2','5','2'], ""), + (['1','2','5','3'], ""), + (['1','2','5','4'], ""), + (['1','2','5','6'], ""), ] From 5f743e6df0c97378c1215c10950143108c2fff21 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 2 Apr 2024 17:36:46 -0600 Subject: [PATCH 335/413] Enabler/add ansible sanity action (#1313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Create bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Added changelog action * Update changelog.yml * Create close-stale-issues * Update close-stale-issues Quite el workflow dispatch * Create bandit2.yml * Update bandit2.yml * Update zos_copy.py * Update zos_copy.py Me equivoque * Create ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Added ac changelog * added lint as an option * Added documentation to ac_changelog * Changed 'lint' to 'command' on ac_changelog * Create * Create first version of the changelog action * Update changelog.yml * Fix changelog.yml * Change name of action Antsibull 'Changelog lint' to AC Changelog lint * Rename 'changelog.yml' to 'ac_changelog.yml * Create 
ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Change path in 'venv setup' on ac * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Removed not required github actions * Update zos_copy.py * Update ac_changelog.yml * Create 'ac-ansible-test.yml' * Test * Delete test changelog * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix paths * Delete commented lines * Delete weird changes * Delete weird changes * Update ac-ansible-test-sanity.yml --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .github/workflows/ac-ansible-test-sanity.yml | 71 ++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 .github/workflows/ac-ansible-test-sanity.yml diff --git a/.github/workflows/ac-ansible-test-sanity.yml b/.github/workflows/ac-ansible-test-sanity.yml new file mode 100644 index 000000000..1354195a5 --- /dev/null +++ b/.github/workflows/ac-ansible-test-sanity.yml @@ -0,0 +1,71 @@ +name: AC Ansible sanity + +on: + pull_request: + branches: + - dev + - staging* + paths-ignore: + - '**.tar.gz' + - 'pycache/**' + - '.ansible-lint' + - 'cache/**' + - '.DS_Store' + - '.git/**' + - '.github/**' + - '.gitignore' + - '.python-version' + - '.pytest_cache/**' + - '.vscode/**' + - 'Jenkinsfile' + - 'ac' + - 'ansible.cfg' + - 'changelogs/**' + - 'collections/**' + - 'docs/**' + - 'scripts/**' + - 'test_config.yml' + - 'tests/*.ini' + - 'tests/*.py' + - 'tests/.pytest_cache' + - 'tests/pycache' + - 'tests/functional' + - 'tests/helpers' + - 'tests/requirements.txt' + - 'tests/unit' + - 'tests/sanity/ignore-*' + - 'venv*' + +jobs: + 
ansible-sanity: + runs-on: ubuntu-latest + env: + branch: ${{ github.event.pull_request.head.ref }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install ansible + + - name: Run ac-sanity + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-build + ./ac --ac-sanity From 3d248c42e09bfb45d0c50938236b50378ed07256 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 2 Apr 2024 17:38:18 -0600 Subject: [PATCH 336/413] [Bugfix][1201]Zos_mvs_raw_ignores_tmp_hlq (#1320) * Add first iteration * Fix mvs_raw * Add another format * Add define * Add parms to avoid fails * Quick fix to not avoid tmphlq * Fix sanity issues * Fix white spaces * Return call of hlq * Add fragment * Fix capital letters * Change fragment * Fix case sensitive data set * Fix not exist dataset * Return dataset * Fix upper case for latest dataset and change of datasize from dtouch * Fix upper case and lower case * Change typo * Fix documentation * Fix not match * Unit testing to uppercase * Fis uppercases in mvs raw * Add uppercase * New problem ID * Remove unnecesary function and add KSDS solution --- .../1320-Zos_mvs_raw_ignores_tmp_hlq.yml | 5 + plugins/module_utils/zos_mvs_raw.py | 6 +- plugins/modules/zos_mvs_raw.py | 260 ++++++++---------- .../modules/test_zos_mvs_raw_func.py | 88 +++--- tests/unit/test_zos_mvs_raw_unit.py | 80 +++--- 5 files changed, 210 insertions(+), 229 deletions(-) create mode 100644 changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml diff --git 
a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml new file mode 100644 index 000000000..058faf66e --- /dev/null +++ b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. + Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created + during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 7c2badf84..466775939 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -24,7 +24,7 @@ class MVSCmd(object): """ @staticmethod - def execute(pgm, dds, parm="", debug=False, verbose=False): + def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an unauthorized MVS command. Args: @@ -36,9 +36,10 @@ def execute(pgm, dds, parm="", debug=False, verbose=False): MVSCmdResponse: The response of the command. 
""" module = AnsibleModuleHelper(argument_spec={}) - command = "mvscmd {0} {1} {2} ".format( + command = "mvscmd {0} {1} {2} {3}".format( "-d" if debug else "", "-v" if verbose else "", + "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) rc, out, err = module.run_command(command) @@ -64,7 +65,6 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) - rc, out, err = module.run_command(command) return MVSCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 502d2ead7..a440c31c6 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -96,16 +96,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - library - - pds - - pdse - - large - - basic - - seq - - rrds - - esds - - lds - - ksds + - LIBRARY + - PDS + - PDSE + - LARGE + - BASIC + - SEQ + - RRDS + - ESDS + - LDS + - KSDS disposition: description: - I(disposition) indicates the status of a data set. @@ -174,12 +174,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - trk - - cyl - - b - - k - - m - - g + - TRK + - CYL + - B + - K + - M + - G space_primary: description: - The primary amount of space to allocate for a new data set. @@ -325,11 +325,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -505,11 +505,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -717,16 +717,16 @@ - Maps to DSNTYPE on z/OS. 
type: str choices: - - library - - pds - - pdse - - large - - basic - - seq - - rrds - - esds - - lds - - ksds + - LIBRARY + - PDS + - PDSE + - LARGE + - BASIC + - SEQ + - RRDS + - ESDS + - LDS + - KSDS disposition: description: - I(disposition) indicates the status of a data set. @@ -795,12 +795,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - trk - - cyl - - b - - k - - m - - g + - TRK + - CYL + - B + - K + - M + - G space_primary: description: - The primary amount of space to allocate for a new data set. @@ -946,11 +946,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -1124,11 +1124,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. 
@@ -1300,13 +1300,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1324,13 +1324,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1369,13 +1369,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1398,15 +1398,15 @@ disposition: new replace: yes backup: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - "111111" - "SCR002" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1628,10 +1628,6 @@ backups = [] -# Use of global tmphlq to keep coherent classes definitions -g_tmphlq = "" - - def run_module(): """Executes all module-related functions. 
@@ -1651,7 +1647,7 @@ def run_module(): type="str", choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], ), - space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), + space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type="raw"), @@ -1664,16 +1660,16 @@ def run_module(): type=dict( type="str", choices=[ - "library", - "pds", - "pdse", - "seq", - "basic", - "large", - "ksds", - "rrds", - "lds", - "esds", + "LIBRARY", + "PDS", + "PDSE", + "SEQ", + "BASIC", + "LARGE", + "KSDS", + "RRDS", + "LDS", + "ESDS", ], ), encryption_key_1=dict( @@ -1695,7 +1691,7 @@ def run_module(): key_length=dict(type="int", no_log=False), key_offset=dict(type="int", no_log=False), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1770,7 +1766,7 @@ def run_module(): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1839,8 +1835,7 @@ def run_module(): if not module.check_mode: try: parms = parse_and_validate_args(module.params) - global g_tmphlq - g_tmphlq = parms.get("tmp_hlq") + tmphlq = parms.get("tmp_hlq") dd_statements = build_dd_statements(parms) program = parms.get("program_name") program_parm = parms.get("parm") @@ -1852,6 +1847,7 @@ def run_module(): dd_statements=dd_statements, authorized=authorized, verbose=verbose, + tmp_hlq=tmphlq, ) if program_response.rc != 0 and program_response.stderr: raise ZOSRawError( @@ -1894,7 +1890,7 @@ def parse_and_validate_args(params): type="str", choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], ), 
- space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), + space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type=volumes), @@ -1907,16 +1903,16 @@ def parse_and_validate_args(params): type=dict( type="str", choices=[ - "library", - "pds", - "pdse", - "seq", - "basic", - "large", - "ksds", - "rrds", - "lds", - "esds", + "LIBRARY", + "PDS", + "PDSE", + "SEQ", + "BASIC", + "LARGE", + "KSDS", + "RRDS", + "LDS", + "ESDS", ], ), encryption_key_1=dict( @@ -1940,7 +1936,7 @@ def parse_and_validate_args(params): type=key_offset, default=key_offset_default, dependencies=["type"] ), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1996,7 +1992,7 @@ def parse_and_validate_args(params): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -2088,8 +2084,8 @@ def key_length(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "ksds": - raise ValueError('key_length is only valid when "type=ksds".') + if contents is not None and dependencies.get("type") != "KSDS": + raise ValueError('key_length is only valid when "type=KSDS".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( 'Invalid argument "{0}" for type "key_length".'.format(str(contents)) @@ -2109,8 +2105,8 @@ def key_offset(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "ksds": - raise ValueError('key_offset is only valid when "type=ksds".') + if contents 
is not None and dependencies.get("type") != "KSDS": + raise ValueError('key_offset is only valid when "type=KSDS".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( @@ -2131,9 +2127,9 @@ def key_length_default(contents, dependencies): """ KEY_LENGTH = 5 length = None - if contents is None and dependencies.get("type") == "ksds": + if contents is None and dependencies.get("type") == "KSDS": length = KEY_LENGTH - elif dependencies.get("type") == "ksds": + elif dependencies.get("type") == "KSDS": length = contents return length @@ -2149,9 +2145,9 @@ def key_offset_default(contents, dependencies): """ KEY_OFFSET = 0 offset = None - if contents is None and dependencies.get("type") == "ksds": + if contents is None and dependencies.get("type") == "KSDS": offset = KEY_OFFSET - elif dependencies.get("type") == "ksds": + elif dependencies.get("type") == "KSDS": offset = contents return offset @@ -2408,7 +2404,7 @@ def build_dd_statements(parms): dd_statements = [] for dd in parms.get("dds"): dd_name = get_dd_name(dd) - dd = set_extra_attributes_in_dd(dd) + dd = set_extra_attributes_in_dd(dd, parms) data_definition = build_data_definition(dd) if data_definition is None: raise ValueError("No valid data definition found.") @@ -2444,26 +2440,27 @@ def get_dd_name(dd): return dd_name -def set_extra_attributes_in_dd(dd): +def set_extra_attributes_in_dd(dd, parms): """ - Set any extra attributes in dds like in global g_tmphlq. + Set any extra attributes in dds like in global tmp_hlq. Args: dd (dict): A single DD parm as specified in module parms. Returns: dd (dict): A single DD parm as specified in module parms. 
""" + tmphlq = parms.get("tmp_hlq") if dd.get("dd_data_set"): - dd.get("dd_data_set")["tmphlq"] = g_tmphlq + dd.get("dd_data_set")["tmphlq"] = tmphlq elif dd.get("dd_input"): - dd.get("dd_input")["tmphlq"] = g_tmphlq + dd.get("dd_input")["tmphlq"] = tmphlq elif dd.get("dd_output"): - dd.get("dd_output")["tmphlq"] = g_tmphlq + dd.get("dd_output")["tmphlq"] = tmphlq elif dd.get("dd_vio"): - dd.get("dd_vio")["tmphlq"] = g_tmphlq + dd.get("dd_vio")["tmphlq"] = tmphlq elif dd.get("dd_concat"): for single_dd in dd.get("dd_concat").get("dds", []): - set_extra_attributes_in_dd(single_dd) + set_extra_attributes_in_dd(single_dd, parms) return dd @@ -2572,6 +2569,7 @@ def __init__( """ self.backup = None self.return_content = ReturnContent(**(return_content or {})) + self.tmphlq = tmphlq primary_unit = space_type secondary_unit = space_type key_label1 = None @@ -2698,7 +2696,6 @@ def __init__( ) -# TODO: potentially extend the available parameters to end user class RawInputDefinition(InputDefinition): """Wrapper around InputDefinition to contain information about desired return contents. @@ -2707,7 +2704,7 @@ class RawInputDefinition(InputDefinition): InputDefinition (InputDefinition): Input DD data type to be used in a DDStatement. """ - def __init__(self, content="", return_content=None, **kwargs): + def __init__(self, content="", return_content=None, tmphlq="", **kwargs): """Initialize RawInputDefinition Args: @@ -2715,7 +2712,7 @@ def __init__(self, content="", return_content=None, **kwargs): return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. """ self.return_content = ReturnContent(**(return_content or {})) - super().__init__(content=content) + super().__init__(content=content, tmphlq=tmphlq) class RawOutputDefinition(OutputDefinition): @@ -2726,7 +2723,7 @@ class RawOutputDefinition(OutputDefinition): OutputDefinition (OutputDefinition): Output DD data type to be used in a DDStatement. 
""" - def __init__(self, return_content=None, **kwargs): + def __init__(self, return_content=None, tmphlq="", **kwargs): """Initialize RawOutputDefinition Args: @@ -2734,7 +2731,7 @@ def __init__(self, return_content=None, **kwargs): return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. """ self.return_content = ReturnContent(**(return_content or {})) - super().__init__() + super().__init__(tmphlq=tmphlq) class ReturnContent(object): @@ -2761,28 +2758,6 @@ def __init__(self, type=None, src_encoding=None, response_encoding=None): self.response_encoding = response_encoding -def to_bytes(size, unit): - """Convert sizes of various units to bytes. - - Args: - size (int): The size to convert. - unit (str): The unit of size. - - Returns: - int: The size converted to bytes. - """ - num_bytes = 0 - if unit == "b": - num_bytes = size - elif unit == "k": - num_bytes = size * 1024 - elif unit == "m": - num_bytes = size * 1048576 - elif unit == "g": - num_bytes = size * 1073741824 - return num_bytes - - def rename_parms(parms, name_map): """Rename parms based on a provided dictionary. @@ -2839,7 +2814,7 @@ def data_set_exists(name, volumes=None): def run_zos_program( - program, parm="", dd_statements=None, authorized=False, verbose=False + program, parm="", dd_statements=None, authorized=False, verbose=False, tmp_hlq=None ): """Run a program on z/OS. @@ -2848,6 +2823,7 @@ def run_zos_program( parm (str, optional): Additional argument string if required. Defaults to "". dd_statements (list[DDStatement], optional): DD statements to allocate for the program. Defaults to []. authorized (bool, optional): Determines if program will execute as an authorized user. Defaults to False. + tmp_hlq (str, optional): Arguments overwrite variable tmp_hlq Returns: MVSCmdResponse: Holds the response information for program execution. 
@@ -2857,11 +2833,11 @@ def run_zos_program( response = None if authorized: response = MVSCmd.execute_authorized( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq ) else: response = MVSCmd.execute( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq ) return response diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index fd20a6a92..ca5b6384d 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -62,7 +62,7 @@ def test_disposition_new(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -86,7 +86,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -118,7 +118,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, default_volume = volumes.get_available_vol() default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -133,12 +133,12 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, return_content=dict(type="text"), replace=True, backup=True, - type="seq", + type="SEQ", space_primary=5, space_secondary=1, - space_type="m", + space_type="M", volumes=default_volume, - record_format="fb" + 
record_format="FB" ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -172,7 +172,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, disposition="new", - type="pds", + type="PDS", directory_blocks=15, return_content=dict(type="text"), ), @@ -197,7 +197,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=default_data_set, type="pds", state="present", replace=True + name=default_data_set, type="PDS", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,7 +234,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -267,11 +267,11 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch @pytest.mark.parametrize( "space_type,primary,secondary,expected", [ - ("trk", 3, 1, 169992), - ("cyl", 3, 1, 2549880), - ("b", 3, 1, 56664), - ("k", 3, 1, 56664), - ("m", 3, 1, 2889864), + ("TRK", 3, 1, 169992), + ("CYL", 3, 1, 2549880), + ("B", 3, 1, 56664), + ("K", 3, 1, 56664), + ("M", 3, 1, 3003192), ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): @@ -288,7 +288,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", space_primary=primary, space_secondary=secondary, space_type=space_type, @@ -315,7 +315,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte @pytest.mark.parametrize( "data_set_type", - ["pds", "pdse", "large", "basic", "seq"], + ["PDS", 
"PDSE", "LARGE", "BASIC", "SEQ"], ) def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -351,7 +351,7 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s @pytest.mark.parametrize( "data_set_type", - ["ksds", "rrds", "lds", "esds"], + ["KSDS", "RRDS", "LDS", "ESDS"], ) def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -374,7 +374,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste volumes=[volume_1], ), ) - if data_set_type != "ksds" + if data_set_type != "KSDS" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, @@ -393,14 +393,14 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste # * because that means data set exists and is VSAM so we can't read it results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set)) for result in results.contacted.values(): - assert "EDC5041I" in result.get("stderr", "") + assert "EDC5041I" or "EDC5049I" in result.get("stderr", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "record_format", - ["u", "vb", "vba", "fb", "fba"], + ["U", "VB", "VBA", "FB", "FBA"], ) def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: @@ -453,7 +453,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -505,7 +505,7 @@ def test_return_text_content_encodings( default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -544,7 +544,7 @@ def test_reuse_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() 
hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -555,7 +555,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", reuse=True, return_content=dict(type="text"), ), @@ -577,7 +577,7 @@ def test_replace_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -588,7 +588,7 @@ def test_replace_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, return_content=dict(type="text"), ), @@ -619,7 +619,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, return_content=dict(type="text"), ), @@ -636,7 +636,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, return_content=dict(type="text"), @@ -687,7 +687,7 @@ def test_input_empty(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -719,7 +719,7 @@ def test_input_large(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -752,7 +752,7 @@ def test_input_provided_as_list(ansible_zos_module): 
dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -792,7 +792,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", ), ), dict( @@ -844,7 +844,7 @@ def test_input_return_text_content_encodings( dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", ), ), dict( @@ -1164,7 +1164,7 @@ def test_file_record_length(ansible_zos_module, record_length): @pytest.mark.parametrize( "record_format", - ["u", "vb", "vba", "fb", "fba"], + ["U", "VB", "VBA", "FB", "FBA"], ) def test_file_record_format(ansible_zos_module, record_format): try: @@ -1353,7 +1353,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ) ), @@ -1361,7 +1361,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1391,8 +1391,8 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu hosts = ansible_zos_module default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() - hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="SEQ") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1405,7 +1405,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=default_data_set, disposition="new", - 
type="seq", + type="SEQ", replace=True, backup=True, return_content=dict(type="text"), @@ -1415,7 +1415,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, ) @@ -1462,7 +1462,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' - hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") + hosts.all.zos_data_set(name=default_data_set, state="present", type="PDS") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1482,7 +1482,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1538,7 +1538,7 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1766,7 +1766,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co try: hosts = ansible_zos_module default_data_set = "ANSIBLE.USER.PRIVATE.TEST" - hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) diff --git a/tests/unit/test_zos_mvs_raw_unit.py b/tests/unit/test_zos_mvs_raw_unit.py index e50734756..f528412da 100644 --- a/tests/unit/test_zos_mvs_raw_unit.py +++ b/tests/unit/test_zos_mvs_raw_unit.py @@ -59,7 +59,7 @@ def run_command(self, *args, 
**kwargs): "new", "keep", "keep", - "cyl", + "CYL", 5, 1, "smsclas1", @@ -67,17 +67,17 @@ def run_command(self, *args, **kwargs): "smsclas1", 80, "SOMEKEYLAB100", - "library", + "LIBRARY", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "u", + "U", ), ( "data.set.name(mem1)", "shr", "delete", "keep", - "trk", + "TRK", "5", 1, "smsclas1", @@ -85,17 +85,17 @@ def run_command(self, *args, **kwargs): "smsclas3", 120, "somekeylab1", - "basic", + "BASIC", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "fb", + "FB", ), ( "DATA.NAME.HERE.NOW", "old", "catalog", "uncatalog", - "b", + "B", 55, "100", "SMSCLASS", @@ -103,17 +103,17 @@ def run_command(self, *args, **kwargs): "smscD@s3", 120, "keyfor342fdsme", - "large", + "LARGE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "fba", + "FBA", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "g", + "G", 1, "9", "SMSCLASS", @@ -121,17 +121,17 @@ def run_command(self, *args, **kwargs): "", 120, "keyfor342fdsme", - "pdse", + "PDSE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "vb", + "VB", ), ( "DAT$.now", "new", "delete", "keep", - "m", + "M", 1, 9, "SMSCLASS", @@ -139,10 +139,10 @@ def run_command(self, *args, **kwargs): "", 0, "", - "lds", + "LDS", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "vba", + "VBA", ), ], ) @@ -237,7 +237,7 @@ def test_argument_parsing_data_set( "delete", 0, 100, - "fb", + "FB", "record", "r", ["ocreat", "oappend", "onoctty"], @@ -248,14 +248,14 @@ def test_argument_parsing_data_set( "delete", 200, "100", - "fba", + "FBA", "record", "w", ["oappend", "osync"], ), - ("/u/OEUSR01", "keep", "delete", 0, 100, "vb", "binary", "rw", ["ononblock"]), - ("/u/testmeee", "keep", "delete", 0, 100, "vba", "record", "read_only", []), - ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "u", "text", "write_only", []), + ("/u/OEUSR01", 
"keep", "delete", 0, 100, "VB", "binary", "rw", ["ononblock"]), + ("/u/testmeee", "keep", "delete", 0, 100, "VBA", "record", "read_only", []), + ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "U", "text", "write_only", []), ], ) def test_argument_parsing_unix( @@ -338,7 +338,7 @@ def test_argument_parsing_unix( "old", "keep", "keep", - "cyl", + "CYL", 5, 1, "smsclas1", @@ -346,17 +346,17 @@ def test_argument_parsing_unix( "smsclas1", 80, "SOMEKEYLAB100", - "library", + "LIBRARY", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "u", + "U", ), ( "data.set.name(mem1waytoolong)", "excl", "delete", "keep", - "trk", + "TRK", "5", 1, "smsclas1", @@ -364,10 +364,10 @@ def test_argument_parsing_unix( "smsclas3", 120, "somekeylab1", - "basic", + "BASIC", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "fb", + "FB", ), ( "DATA.NAME.HERE.NOW", @@ -382,17 +382,17 @@ def test_argument_parsing_unix( "smscD@s3", 120, "keyfor342fdsme", - "large", + "LARGE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "fba", + "FBA", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "g", + "G", 1, "9", "SMSCLASSsss", @@ -400,17 +400,17 @@ def test_argument_parsing_unix( "", 120, "keyfor342fdsme", - "pdse", + "PDSE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "vb", + "VB", ), ( "DAT$.now", "new", "delete", "meep", - "m", + "M", 1, 9, "SMSCLASS", @@ -418,10 +418,10 @@ def test_argument_parsing_unix( "", 0, "", - "ksdss", + "KSDSS", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "vba", + "VBA", ), ], ) @@ -525,7 +525,7 @@ def test_argument_parsing_data_set_failure_path( "delete", 200, "100", - "fba", + "FBA", "record", "w", ["append", "osync"], @@ -537,12 +537,12 @@ def test_argument_parsing_data_set_failure_path( "delete", 0, 100, - "vba", + "VBA", "record", "read_only", ["hello"], ), - ("/u/hellow/d/or4ld", "meep", 
"keep", 0, 100, "u", "text", None, []), + ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "U", "text", None, []), ], ) def test_argument_parsing_unix_failure_path( @@ -620,7 +620,7 @@ def test_ksds_defaults( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ksds", + "type": "KSDS", } }, ], @@ -663,7 +663,7 @@ def test_ksds_exception_key_length( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "esds", + "type": "ESDS", "key_length": 5, } }, @@ -693,7 +693,7 @@ def test_ksds_exception_key_offset( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "esds", + "type": "ESDS", "key_offset": 5, } }, From 2697e32b474ec33832e2977c3e73246904c3e5ad Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 4 Apr 2024 12:23:10 -0400 Subject: [PATCH 337/413] Removed a test condition that obscured duration (#1364) * removed a function in a test that would obscure if null durations are coming back it appears this issue is resolved. * added changelog --------- Co-authored-by: Demetri <dimatos@gmail.com> --- changelogs/fragments/1032-clean-job_submit-test.yml | 3 +++ tests/functional/modules/test_zos_job_submit_func.py | 6 ++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1032-clean-job_submit-test.yml diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml new file mode 100644 index 000000000..bb4248aec --- /dev/null +++ b/changelogs/fragments/1032-clean-job_submit-test.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. + (https://github.com/ansible-collections/ibm_zos_core/pull/1364). 
diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index bae4dbb36..c148b6223 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -695,10 +695,8 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration'): - duration = result.get('duration') - else: - duration = 0 + + duration = result.get('duration') if duration >= args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) From aeafa82cb02c19068f8f704b093a6b07dec15392 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 5 Apr 2024 11:20:17 -0600 Subject: [PATCH 338/413] Updated actions to only run when PR is not draft (#1412) * Updated actions to only run when PR is not draft * Add test * Modified draft condition * Update zos_apf.py * Modified workflows * test * test --- .github/workflows/ac-ansible-test-sanity.yml | 2 ++ .github/workflows/ac-bandit.yml | 6 +++- .github/workflows/ac-galaxy-importer.yml | 34 +++++++++++++++++++- .github/workflows/ac_changelog.yml | 2 ++ 4 files changed, 42 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ac-ansible-test-sanity.yml b/.github/workflows/ac-ansible-test-sanity.yml index 1354195a5..d0c4b58d2 100644 --- a/.github/workflows/ac-ansible-test-sanity.yml +++ b/.github/workflows/ac-ansible-test-sanity.yml @@ -2,6 +2,7 @@ name: AC Ansible sanity on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] branches: - dev - staging* @@ -38,6 +39,7 @@ on: jobs: ansible-sanity: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest env: branch: ${{ github.event.pull_request.head.ref }} diff 
--git a/.github/workflows/ac-bandit.yml b/.github/workflows/ac-bandit.yml index 288fb92b1..1b93e40a4 100644 --- a/.github/workflows/ac-bandit.yml +++ b/.github/workflows/ac-bandit.yml @@ -2,12 +2,16 @@ name: AC Bandit on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] branches: - dev - staging* - + paths: + - 'plugins/**' + jobs: bandit: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: diff --git a/.github/workflows/ac-galaxy-importer.yml b/.github/workflows/ac-galaxy-importer.yml index 271f01c22..563d37ada 100644 --- a/.github/workflows/ac-galaxy-importer.yml +++ b/.github/workflows/ac-galaxy-importer.yml @@ -2,12 +2,44 @@ name: AC Galaxy Importer on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] branches: - dev - staging* - + paths-ignore: + - '**.tar.gz' + - 'pycache/**' + - '.ansible-lint' + - 'cache/**' + - '.DS_Store' + - '.git/**' + - '.github/**' + - '.gitignore' + - '.python-version' + - '.pytest_cache/**' + - '.vscode/**' + - 'Jenkinsfile' + - 'ac' + - 'ansible.cfg' + - 'changelogs/**' + - 'collections/**' + - 'docs/**' + - 'scripts/**' + - 'test_config.yml' + - 'tests/*.ini' + - 'tests/*.py' + - 'tests/.pytest_cache' + - 'tests/pycache' + - 'tests/functional' + - 'tests/helpers' + - 'tests/requirements.txt' + - 'tests/unit' + - 'tests/sanity/ignore-*' + - 'venv*' + jobs: galaxy-importer: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: diff --git a/.github/workflows/ac_changelog.yml b/.github/workflows/ac_changelog.yml index 523e207b9..e3b3f3cc4 100644 --- a/.github/workflows/ac_changelog.yml +++ b/.github/workflows/ac_changelog.yml @@ -2,6 +2,7 @@ name: AC Changelog Lint on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] paths: - 'changelogs/fragments/*' branches: @@ -10,6 +11,7 @@ on: jobs: lint: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: From d8b87a42117c99144bedd93e4f0b5f7964fc112c 
Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 5 Apr 2024 11:20:49 -0600 Subject: [PATCH 339/413] [Documentation][encode] Add and standarize docstring to encode.py (#1322) * Add and estandarize docstring to encode.py * Create changelog fragment * Modified the google style to numpy * Update changelog fragment * Standarize numpy style * Update encode.py added newline to address pep8 error * Fixed some dcostrings * Modified docstrings --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1322-update-docstring-encode.yml | 3 + plugins/module_utils/encode.py | 357 +++++++++++++----- 2 files changed, 269 insertions(+), 91 deletions(-) create mode 100644 changelogs/fragments/1322-update-docstring-encode.yml diff --git a/changelogs/fragments/1322-update-docstring-encode.yml b/changelogs/fragments/1322-update-docstring-encode.yml new file mode 100644 index 000000000..dd5eb5389 --- /dev/null +++ b/changelogs/fragments/1322-update-docstring-encode.yml @@ -0,0 +1,3 @@ +trivial: + - encode - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1322). \ No newline at end of file diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 195802583..f68a8ab77 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -56,10 +56,12 @@ class Defaults: @staticmethod def get_default_system_charset(): - """Get the default encoding of the current machine + """Get the default encoding of the current machine. - Returns: - str -- The encoding of the current machine + Returns + ------- + str + The encoding of the current machine. 
""" system_charset = locale.getdefaultlocale()[1] if system_charset is None: @@ -80,15 +82,24 @@ def get_default_system_charset(): class EncodeUtils(object): def __init__(self): """Call the coded character set conversion utility iconv - to convert a USS file from one coded character set to another - - Arguments: - module {AnsibleModule} -- The AnsibleModule object from currently running module + to convert a USS file from one coded character set to another. """ self.module = AnsibleModuleHelper(argument_spec={}) self.tmphlq = None def _validate_data_set_name(self, ds): + """Validate data set name using BetterArgParser. + + Parameters + ---------- + ds : str + The source data set name. + + Returns + ------- + str + Parsed data set name. + """ arg_defs = dict( ds=dict(arg_type="data_set"), ) @@ -97,6 +108,18 @@ def _validate_data_set_name(self, ds): return parsed_args.get("ds") def _validate_path(self, path): + """Validate path using BetterArgParser. + + Parameters + ---------- + path : str + The path. + + Returns + ------- + str + Parsed path. + """ arg_defs = dict( path=dict(arg_type="path"), ) @@ -105,6 +128,18 @@ def _validate_path(self, path): return parsed_args.get("path") def _validate_data_set_or_path(self, path): + """Validate data set or path using BetterArgParser. + + Parameters + ---------- + path : str + The path. + + Returns + ------- + str + Parsed path. + """ arg_defs = dict( path=dict(arg_type="data_set_or_path"), ) @@ -113,6 +148,18 @@ def _validate_data_set_or_path(self, path): return parsed_args.get("path") def _validate_encoding(self, encoding): + """Validate encoding using BetterArgParser. + + Parameters + --------- + encoding : str + The encoding. + + Returns + ------- + str + Parsed encoding. 
+ """ arg_defs = dict( encoding=dict(arg_type="encoding"), ) @@ -122,16 +169,24 @@ def _validate_encoding(self, encoding): def listdsi_data_set(self, ds): """Invoke IDCAMS LISTCAT command to get the record length and space used - to estimate the space used by the VSAM data set - - Arguments: - ds: {str} -- The VSAM data set to be checked. - - Raises: - EncodeError: When any exception is raised during the conversion. - Returns: - int -- The maximum record length of the VSAM data set. - int -- The space used by the VSAM data set(KB). + to estimate the space used by the VSAM data set. + + Parameters + ---------- + ds : str + The VSAM data set to be checked. + + Returns + ------- + int + The maximum record length of the VSAM data set. + int + The space used by the VSAM data set(KB). + + Raises + ------ + EncodeError + When any exception is raised during the conversion. """ ds = self._validate_data_set_name(ds) reclen = 80 @@ -179,17 +234,24 @@ def listdsi_data_set(self, ds): return reclen, space_u def temp_data_set(self, reclen, space_u): - """Creates a temporary data set with the given record length and size - - Arguments: - size {str} -- The size of the data set - lrecl {int} -- The record length of the data set - - Returns: - str -- Name of the allocated data set - - Raises: - ZOAUException: When any exception is raised during the data set allocation. + """Creates a temporary data set with the given record length and size. + + Parameters + ---------- + lrecl : int + The record length of the data set. + space_u : str + The size of the data set. + + Returns + ------- + str + Name of the allocated data set. + + Raises + ------ + ZOAUException + When any exception is raised during the data set allocation. DatasetVerificationError: When the data set creation could not be verified. 
""" size = str(space_u * 2) + "K" @@ -208,12 +270,17 @@ def temp_data_set(self, reclen, space_u): return temporary_data_set.name def get_codeset(self): - """Get the list of supported encodings from the USS command 'iconv -l' + """Get the list of supported encodings from the USS command 'iconv -l'. + + Returns + ------- + Union[str] + The code set list supported in current USS platform. - Raises: - EncodeError: When any exception is raised during the conversion - Returns: - list -- The code set list supported in current USS platform + Raises + ------ + EncodeError + When any exception is raised during the conversion. """ code_set = None iconv_list_cmd = ["iconv", "-l"] @@ -226,17 +293,26 @@ def get_codeset(self): return code_set def string_convert_encoding(self, src, from_encoding, to_encoding): - """Convert the encoding of the data when the src is a normal string - - Arguments: - from_code_set: {str} -- The source code set of the string - to_code_set: {str} -- The destination code set for the string - src: {str} -- The input string content - - Raises: - EncodeError: When any exception is raised during the conversion - Returns: - str -- The string content after the encoding + """Convert the encoding of the data when the src is a normal string. + + Parameters + ---------- + src : str + The input string content. + from_encoding : str + The source code set of the string. + to_encoding : str + The destination code set for the string. + + Returns + ------- + str + The string content after the encoding. + + Raises + ------ + EncodeError + When any exception is raised during the conversion. 
""" from_encoding = self._validate_encoding(from_encoding) to_encoding = self._validate_encoding(to_encoding) @@ -249,19 +325,30 @@ def string_convert_encoding(self, src, from_encoding, to_encoding): return out def uss_convert_encoding(self, src, dest, from_code, to_code): - """Convert the encoding of the data in a USS file - - Arguments: - from_code: {str} -- The source code set of the input file - to_code: {str} -- The destination code set for the output file - src: {str} -- The input file name, it should be a uss file - dest: {str} -- The output file name, it should be a uss file - - Raises: - EncodeError: When any exception is raised during the conversion. - MoveFileError: When any exception is raised during moving files. - Returns: - boolean -- Indicate whether the conversion is successful or not. + """Convert the encoding of the data in a USS file. + + Parameters + ---------- + src : str + The input file name, it should be a uss file. + dest : str + The output file name, it should be a uss file. + from_code : str + The source code set of the input file. + to_code : str + The destination code set for the output file. + + Returns + ------- + bool + Indicate whether the conversion is successful or not. + + Raises + ------ + EncodeError + When any exception is raised during the conversion. + MoveFileError + When any exception is raised during moving files. 
""" src = self._validate_path(src) dest = self._validate_path(dest) @@ -306,18 +393,28 @@ def uss_convert_encoding(self, src, dest, from_code, to_code): def uss_convert_encoding_prev(self, src, dest, from_code, to_code): """For multiple files conversion, such as a USS path or MVS PDS data set, - use this method to split then do the conversion - - Arguments: - from_code: {str} -- The source code set of the input path - to_code: {str} -- The destination code set for the output path - src: {str} -- The input uss path or a file - dest: {str} -- The output uss path or a file - - Raises: - EncodeError: When direcotry is empty or copy multiple files to a single file - Returns: - boolean -- Indicate whether the conversion is successful or not + use this method to split then do the conversion. + + Parameters + ---------- + src : str + The input uss path or a file. + dest : str + The output uss path or a file. + from_code : str + The source code set of the input path. + to_code : str + The destination code set for the output path. + + Returns + ------- + bool + Indicate whether the conversion is successful or not. + + Raises + ------ + EncodeError + When directory is empty or copy multiple files to a single file. 
""" src = self._validate_path(src) dest = self._validate_path(dest) @@ -375,18 +472,28 @@ def mvs_convert_encoding( 2) MVS to USS 3) MVS to MVS - Arguments: - src: {str} -- The input MVS data set or USS path to be converted - dest: {str} -- The output MVS data set or USS path to be converted - from_code: {str} -- The source code set of the input MVS data set - to_code: {str} -- The destination code set of the output MVS data set - - Keyword Arguments: - src_type {[type]} -- The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS) (default: {None}) - dest_type {[type]} -- The output MVS data set type (default: {None}) - - Returns: - boolean -- Indicate whether the conversion is successful or not + Parameters + ---------- + src : str + The input MVS data set or USS path to be converted. + dest : str + The output MVS data set or USS path to be converted. + from_code : str + The source code set of the input MVS data set. + to_code : str + The destination code set of the output MVS data set. + + Keyword Parameters + ----------------- + src_type : str + The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS). + dest_type : str + The output MVS data set type. + + Returns + ------- + bool + Indicate whether the conversion is successful or not. """ src = self._validate_data_set_or_path(src) dest = self._validate_data_set_or_path(dest) @@ -458,11 +565,18 @@ def uss_tag_encoding(self, file_path, tag): """Tag the file/directory specified with the given code set. If `file_path` is a directory, all of the files and subdirectories will be tagged recursively. - Arguments: - file_path {str} -- Absolute file path to tag. - tag {str} -- Code set to tag the file/directory. - Raises: - TaggingError: When the chtag command fails. + + Parameters + ---------- + file_path : str + Absolute file path to tag. + tag : str + Code set to tag the file/directory. + + Raises + ------ + TaggingError + When the chtag command fails. 
""" is_dir = os.path.isdir(file_path) @@ -473,11 +587,18 @@ def uss_tag_encoding(self, file_path, tag): def uss_file_tag(self, file_path): """Returns the current tag set for a file. - Arguments: - file_path {str} -- USS path to the file. - Returns: - str -- Current tag set for the file, as returned by 'ls -T' - None -- If the file does not exist or the command fails. + + Parameters + ---------- + file_path : str + USS path to the file. + + Returns + ------- + str + Current tag set for the file, as returned by 'ls -T'. + None + If the file does not exist or the command fails. """ if not os.path.exists(file_path): return None @@ -500,12 +621,50 @@ def uss_file_tag(self, file_path): class EncodeError(Exception): def __init__(self, message): + """Error during encoding. + + Parameters + ---------- + message : str + Human readable string describing the exception. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = 'An error occurred during encoding: "{0}"'.format(message) super(EncodeError, self).__init__(self.msg) class TaggingError(Exception): def __init__(self, file_path, tag, rc, stdout, stderr): + """Error during tagging. + + Parameters + ---------- + file_path : str + File to tag. + tag : str + Tag to put in the file. + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + """ self.msg = 'An error occurred during tagging of {0} to {1}'.format( file_path, tag @@ -518,5 +677,21 @@ def __init__(self, file_path, tag, rc, stdout, stderr): class MoveFileError(Exception): def __init__(self, src, dest, e): + """Error while moving a file. + + Parameters + ---------- + src : str + From where the file moves. + dest : str + To where the file moves. + e : str + Exception message. 
+ + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = "Failed when moving {0} to {1}: {2}".format(src, dest, e) super().__init__(self.msg) From 5b239b1afe04ec4800b93e044f3857ebc10e0d0c Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 16 Apr 2024 08:46:04 -0700 Subject: [PATCH 340/413] [v1.10.0] [Enabler] Standardization of choices in modules (#1388) * Update zos_archive choices * Update zos_backup_restore choices * Update zos_copy choices * Update zos_data_set choices * Update module docs * Update zos_job_submit choices * Update zos_mount choices * Update zos_unarchive choices * Fix zos_archive and update its tests This also includes major work on zos_data_set since half of the test suite for zos_archive depends on creating data sets. * Update zos_backup_restore tests * Update zos_blockinfile tests * Update more modules * Updated more tests * Update zos_unarchive and zos_mount * Update zos_backup_restore unit tests * Update zos_mvs_raw * Update zos_copy tests * Fix some sanity issues * Fix zos_copy KSDS test * Update zos_copy some more * Fix ZFS call * Update zos_unarchive tests * Add massive changelog fragment * Fix call to zos_data_set * Fix more test issues in zos_fetch * Fix zos_find tests * Generate updated docs --- .../fragments/1388-lowercase-choices.yml | 87 +++++ docs/source/modules/zos_apf.rst | 68 ++-- docs/source/modules/zos_apf.rst-e | 318 +++++++++++++++ docs/source/modules/zos_archive.rst | 102 ++--- docs/source/modules/zos_backup_restore.rst | 80 ++-- docs/source/modules/zos_blockinfile.rst | 52 +-- docs/source/modules/zos_copy.rst | 226 ++++++----- docs/source/modules/zos_data_set.rst | 222 +++++------ docs/source/modules/zos_encode.rst | 32 +- docs/source/modules/zos_fetch.rst | 18 +- docs/source/modules/zos_find.rst | 20 +- docs/source/modules/zos_gather_facts.rst | 14 +- docs/source/modules/zos_job_output.rst | 16 +- docs/source/modules/zos_job_query.rst | 20 +- 
docs/source/modules/zos_job_submit.rst | 95 +++-- docs/source/modules/zos_lineinfile.rst | 68 ++-- docs/source/modules/zos_mount.rst | 124 +++--- docs/source/modules/zos_mvs_raw.rst | 364 +++++++++--------- docs/source/modules/zos_operator.rst | 2 +- .../modules/zos_operator_action_query.rst | 20 +- docs/source/modules/zos_ping.rst | 8 +- docs/source/modules/zos_script.rst | 32 +- docs/source/modules/zos_tso_command.rst | 4 +- docs/source/modules/zos_unarchive.rst | 68 ++-- docs/source/modules/zos_volume_init.rst | 34 +- plugins/action/zos_copy.py | 12 +- plugins/action/zos_job_submit.py | 6 +- plugins/action/zos_unarchive.py | 6 +- plugins/module_utils/data_set.py | 2 +- plugins/modules/zos_archive.py | 84 ++-- plugins/modules/zos_backup_restore.py | 32 +- plugins/modules/zos_copy.py | 93 ++--- plugins/modules/zos_data_set.py | 354 +++++++++-------- plugins/modules/zos_job_submit.py | 52 +-- plugins/modules/zos_mount.py | 138 +++---- plugins/modules/zos_mvs_raw.py | 252 ++++++------ plugins/modules/zos_unarchive.py | 62 +-- .../modules/test_zos_archive_func.py | 90 ++--- .../modules/test_zos_backup_restore.py | 20 +- .../modules/test_zos_blockinfile_func.py | 18 +- .../functional/modules/test_zos_copy_func.py | 326 ++++++++-------- .../modules/test_zos_data_set_func.py | 80 ++-- .../modules/test_zos_encode_func.py | 16 +- .../functional/modules/test_zos_fetch_func.py | 32 +- .../functional/modules/test_zos_find_func.py | 16 +- .../modules/test_zos_job_output_func.py | 4 +- .../modules/test_zos_job_query_func.py | 8 +- .../modules/test_zos_job_submit_func.py | 58 +-- .../modules/test_zos_lineinfile_func.py | 17 +- .../functional/modules/test_zos_mount_func.py | 38 +- .../modules/test_zos_mvs_raw_func.py | 86 ++--- .../modules/test_zos_unarchive_func.py | 104 ++--- tests/unit/test_zos_backup_restore_unit.py | 2 +- tests/unit/test_zos_mvs_raw_unit.py | 80 ++-- 54 files changed, 2302 insertions(+), 1880 deletions(-) create mode 100644 
changelogs/fragments/1388-lowercase-choices.yml create mode 100644 docs/source/modules/zos_apf.rst-e diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml new file mode 100644 index 000000000..0f14f42fe --- /dev/null +++ b/changelogs/fragments/1388-lowercase-choices.yml @@ -0,0 +1,87 @@ +breaking_changes: + - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + Suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. + Suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_backup_restore - option ``space_type`` no longer accepts uppercase + choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + Option ``space_type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``record_format`` no longer accepts uppercase choices, users + should replace them with lowercase ones. 
+ Options inside ``batch`` no longer accept uppercase choices, users should + replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_submit - option ``location`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``fs_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + Option ``unmount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``mount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``tag_untagged`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``automove`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboptions ``disposition_normal`` and ``disposition_abnormal`` of + ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. + This also applies when defining a ``dd_data_set`` inside ``dd_concat``. + Suboption ``space_type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dd_unix`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Options inside ``dd_concat`` no longer accept uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + +trivial: + - zos_blockinfile - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_find - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_lineinfile - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_encode - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_fetch - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_output - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_query - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
\ No newline at end of file diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index e9a55c007..73d616e76 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -37,7 +37,7 @@ library state - Ensure that the library is added ``state=present`` or removed ``state=absent``. + Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . The APF list format has to be "DYNAMIC". @@ -58,24 +58,24 @@ force_dynamic volume - The identifier for the volume containing the library specified in the ``library`` parameter. The values must be one the following. + The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. 1. The volume serial number. - 2. Six asterisks (******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If ``volume`` is not specified, ``library`` has to be cataloged. + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If ``sms=True``, ``volume`` value will be ignored. 
+ If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. | **required**: False | **type**: bool @@ -83,13 +83,13 @@ sms operation - Change APF list format to "DYNAMIC" ``operation=set_dynamic`` or "STATIC" ``operation=set_static`` + Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ - Display APF list current format ``operation=check_format`` + Display APF list current format \ :literal:`operation=check\_format`\ - Display APF list entries when ``operation=list`` ``library``, ``volume`` and ``sms`` will be used as filters. + Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. - If ``operation`` is not set, add or remove operation will be ignored. + If \ :literal:`operation`\ is not set, add or remove operation will be ignored. | **required**: False | **type**: str @@ -99,23 +99,23 @@ operation tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str persistent - Add/remove persistent entries to or from *data_set_name* + Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ - ``library`` will not be persisted or removed if ``persistent=None`` + \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ | **required**: False | **type**: dict data_set_name - The data set name used for persisting or removing a ``library`` from the APF list. + The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. 
| **required**: True | **type**: str @@ -124,13 +124,13 @@ persistent marker The marker line template. - ``{mark}`` will be replaced with "BEGIN" and "END". + \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". - Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. - ``{mark}`` length may not exceed 72 characters. + \ :literal:`{mark}`\ length may not exceed 72 characters. - The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format | **required**: False | **type**: str @@ -138,9 +138,9 @@ persistent backup - Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". + Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -152,11 +152,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. 
+ If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name must be an MVS data set name. + If the source is an MVS data set, the backup\_name must be an MVS data set name. - If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -168,9 +168,9 @@ persistent batch A list of dictionaries for adding or removing libraries. - This is mutually exclusive with ``library``, ``volume``, ``sms`` + This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ - Can be used with ``persistent`` + Can be used with \ :literal:`persistent`\ | **required**: False | **type**: list @@ -185,24 +185,24 @@ batch volume - The identifier for the volume containing the library specified on the ``library`` parameter. The values must be one of the following. + The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. 1. The volume serial number - 2. Six asterisks (******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. 
Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If ``volume`` is not specified, ``library`` has to be cataloged. + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If true ``volume`` will be ignored. + If true \ :literal:`volume`\ will be ignored. | **required**: False | **type**: bool @@ -283,9 +283,9 @@ Return Values stdout The stdout from ZOAU command apfadm. Output varies based on the type of operation. - state> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm - operation> stdout of operation options list> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set_dynamic> Set to DYNAMIC set_static> Set to STATIC check_format> DYNAMIC or STATIC + operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] 
set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_apf.rst-e b/docs/source/modules/zos_apf.rst-e new file mode 100644 index 000000000..ec8e6824c --- /dev/null +++ b/docs/source/modules/zos_apf.rst-e @@ -0,0 +1,318 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_apf.py + +.. _zos_apf_module: + + +zos_apf -- Add or remove libraries to Authorized Program Facility (APF) +======================================================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Adds or removes libraries to Authorized Program Facility (APF). +- Manages APF statement persistent entries to a data set or data set member. +- Changes APF list format to "DYNAMIC" or "STATIC". +- Gets the current APF list entries. + + + + + +Parameters +---------- + + +library + The library name to be added or removed from the APF list. + + | **required**: False + | **type**: str + + +state + Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . + + The APF list format has to be "DYNAMIC". + + | **required**: False + | **type**: str + | **default**: present + | **choices**: absent, present + + +force_dynamic + Will force the APF list format to "DYNAMIC" before adding or removing libraries. + + If the format is "STATIC", the format will be changed to "DYNAMIC". + + | **required**: False + | **type**: bool + | **default**: False + + +volume + The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. + + 1. The volume serial number. + + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + + 3. 
\*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + + | **required**: False + | **type**: str + + +sms + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + + If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. + + | **required**: False + | **type**: bool + | **default**: False + + +operation + Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ + + Display APF list current format \ :literal:`operation=check\_format`\ + + Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. + + If \ :literal:`operation`\ is not set, add or remove operation will be ignored. + + | **required**: False + | **type**: str + | **choices**: set_dynamic, set_static, check_format, list + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup datasets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + + | **required**: False + | **type**: str + + +persistent + Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ + + \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ + + | **required**: False + | **type**: dict + + + data_set_name + The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. + + | **required**: True + | **type**: str + + + marker + The marker line template. + + \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". 
+ + Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + + \ :literal:`{mark}`\ length may not exceed 72 characters. + + The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + + | **required**: False + | **type**: str + | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */ + + + backup + Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ . + + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + + The backup file name will be returned on either success or failure of module execution such that data can be retrieved. + + | **required**: False + | **type**: bool + | **default**: False + + + backup_name + Specify the USS file name or data set name for the destination backup. + + If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name must be a file or path name, and the USS file or path must be an absolute path name. + + If the source is an MVS data set, the backup\_name must be an MVS data set name. + + If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + + If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. + + | **required**: False + | **type**: str + + + +batch + A list of dictionaries for adding or removing libraries.
+ + This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ + + Can be used with \ :literal:`persistent`\ + + | **required**: False + | **type**: list + | **elements**: dict + + + library + The library name to be added or removed from the APF list. + + | **required**: True + | **type**: str + + + volume + The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. + + 1. The volume serial number + + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + + | **required**: False + | **type**: str + + + sms + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + + If true \ :literal:`volume`\ will be ignored. + + | **required**: False + | **type**: bool + | **default**: False + + + + + +Examples +-------- + +.. 
code-block:: yaml+jinja + + + - name: Add a library to the APF list + zos_apf: + library: SOME.SEQUENTIAL.DATASET + volume: T12345 + - name: Add a library (cataloged) to the APF list and persistence + zos_apf: + library: SOME.SEQUENTIAL.DATASET + force_dynamic: True + persistent: + data_set_name: SOME.PARTITIONED.DATASET(MEM) + - name: Remove a library from the APF list and persistence + zos_apf: + state: absent + library: SOME.SEQUENTIAL.DATASET + volume: T12345 + persistent: + data_set_name: SOME.PARTITIONED.DATASET(MEM) + - name: Batch libraries with custom marker, persistence for the APF list + zos_apf: + persistent: + data_set_name: "SOME.PARTITIONED.DATASET(MEM)" + marker: "/* {mark} PROG001 USR0010 */" + batch: + - library: SOME.SEQ.DS1 + - library: SOME.SEQ.DS2 + sms: True + - library: SOME.SEQ.DS3 + volume: T12345 + - name: Print the APF list matching library pattern or volume serial number + zos_apf: + operation: list + library: SOME.SEQ.* + volume: T12345 + - name: Set the APF list format to STATIC + zos_apf: + operation: set_static + + + + +Notes +----- + +.. note:: + It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF® FACILITY resource class. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. + + To add or delete the APF list entry for library libname, you must have UPDATE authority to the RACF® FACILITY resource class entity CSVAPF.libname, or there must be no FACILITY class profile that protects that entity. + + To change the format of the APF list to dynamic, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.DYNAMIC, or there must be no FACILITY class profile that protects that entity. 
+ + To change the format of the APF list back to static, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.STATIC, or there must be no FACILITY class profile that protects that entity. + + + + + + + +Return Values +------------- + + +stdout + The stdout from ZOAU command apfadm. Output varies based on the type of operation. + + state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + + operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC + + | **returned**: always + | **type**: str + +stderr + The error messages from ZOAU command apfadm + + | **returned**: always + | **type**: str + | **sample**: BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list. + +rc + The return code from ZOAU command apfadm + + | **returned**: always + | **type**: int + +msg + The module messages + + | **returned**: failure + | **type**: str + | **sample**: Parameter verification failed + +backup_name + Name of the backup file or data set that was created. + + | **returned**: if backup=true, always + | **type**: str + diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index fe93474f0..3249f3ba8 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -20,7 +20,7 @@ Synopsis - Sources for archiving must be on the remote z/OS system. - Supported sources are USS (UNIX System Services) or z/OS data sets. - The archive remains on the remote z/OS system. -- For supported archive formats, see option ``format``. +- For supported archive formats, see option \ :literal:`format`\ . @@ -35,7 +35,7 @@ src USS file paths should be absolute paths. 
- MVS data sets supported types are: ``SEQ``, ``PDS``, ``PDSE``. + MVS data sets supported types are: \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . VSAMs are not supported. @@ -68,7 +68,7 @@ format terse_pack - Compression option for use with the terse format, *name=terse*. + Compression option for use with the terse format, \ :emphasis:`name=terse`\ . Pack will compress records in a data set so that the output results in lossless data compression. @@ -78,7 +78,7 @@ format | **required**: False | **type**: str - | **choices**: PACK, SPACK + | **choices**: pack, spack xmit_log_data_set @@ -88,14 +88,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the *xmit_log_data_set* name, ensure there is adequate space. + When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. + If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using \ :literal:`xmit`\ or \ :literal:`terse`\ . | **required**: False | **type**: bool @@ -107,19 +107,19 @@ format dest The remote absolute path or data set where the archive should be created. - *dest* can be a USS file or MVS data set name. + \ :emphasis:`dest`\ can be a USS file or MVS data set name. - If *dest* has missing parent directories, they will be created. + If \ :emphasis:`dest`\ has missing parent directories, they will be created. - If *dest* is a nonexistent USS file, it will be created. + If \ :emphasis:`dest`\ is a nonexistent USS file, it will be created. 
- If *dest* is an existing file or data set and *force=true*, the existing *dest* will be deleted and recreated with attributes defined in the *dest_data_set* option or computed by the module. + If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=true`\ , the existing \ :emphasis:`dest`\ will be deleted and recreated with attributes defined in the \ :emphasis:`dest\_data\_set`\ option or computed by the module. - If *dest* is an existing file or data set and *force=false* or not specified, the module exits with a note to the user. + If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=false`\ or not specified, the module exits with a note to the user. - Destination data set attributes can be set using *dest_data_set*. + Destination data set attributes can be set using \ :emphasis:`dest\_data\_set`\ . - Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the *dest_data_set* option will improve performance. + Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the \ :emphasis:`dest\_data\_set`\ option will improve performance. | **required**: True | **type**: str @@ -128,9 +128,9 @@ dest exclude Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from src list and glob expansion. - Patterns (wildcards) can contain one of the following, `?`, `*`. + Patterns (wildcards) can contain one of the following, \`?\`, \`\*\`. - * matches everything. + \* matches everything. ? matches any single character. 
@@ -144,7 +144,7 @@ group When left unspecified, it uses the current group of the current use unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if ``dest`` is USS, otherwise ignored. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str @@ -153,13 +153,13 @@ group mode The permission of the destination archive file. - If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - *mode=preserve* means that the file will be given the same permissions as the src file. + \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the src file. 
| **required**: False | **type**: str @@ -170,14 +170,14 @@ owner When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if ``dest`` is USS, otherwise ignored. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str remove - Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *src*. + Remove any added source files , trees or data sets after module \ `zos\_archive <./zos_archive.html>`__\ adds them to the archive. Source files, trees and data sets are identified with option \ :emphasis:`src`\ . | **required**: False | **type**: bool @@ -185,7 +185,7 @@ remove dest_data_set - Data set attributes to customize a ``dest`` data set to be archived into. + Data set attributes to customize a \ :literal:`dest`\ data set to be archived into. | **required**: False | **type**: dict @@ -203,23 +203,23 @@ dest_data_set | **required**: False | **type**: str - | **default**: SEQ - | **choices**: SEQ + | **default**: seq + | **choices**: seq space_primary - If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int space_secondary - If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using *space_type*. 
+ The unit of space used is set using \ :emphasis:`space\_type`\ .

 | **required**: False
 | **type**: int
@@ -228,21 +228,21 @@ dest_data_set
 space_type
 If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space.

- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ .

 | **required**: False
 | **type**: str
- | **choices**: K, M, G, CYL, TRK
+ | **choices**: k, m, g, cyl, trk


 record_format
- If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``)
+ If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`FB`\ )

- Choices are case-insensitive.
+ Choices are case-sensitive.

 | **required**: False
 | **type**: str
- | **choices**: FB, VB, FBA, VBA, U
+ | **choices**: fb, vb, fba, vba, u


 record_length
@@ -313,18 +313,18 @@ dest_data_set
 tmp_hlq
 Override the default high level qualifier (HLQ) for temporary data sets.

- The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used.
+ The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used.

 | **required**: False
 | **type**: str


 force
- If set to ``true`` and the remote file or data set ``dest`` will be deleted. Otherwise it will be created with the ``dest_data_set`` attributes or default values if ``dest_data_set`` is not specified.
+ If set to \ :literal:`true`\ , the remote file or data set \ :literal:`dest`\ will be deleted. Otherwise it will be created with the \ :literal:`dest\_data\_set`\ attributes or default values if \ :literal:`dest\_data\_set`\ is not specified.
- If set to ``false``, the file or data set will only be copied if the destination does not exist. + If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. - If set to ``false`` and destination exists, the module exits with a note to the user. + If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -373,7 +373,7 @@ Examples format: name: terse format_options: - terse_pack: "SPACK" + terse_pack: "spack" use_adrdssu: True # Use a pattern to store @@ -392,11 +392,11 @@ Notes ----- .. note:: - This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. + This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos\_fetch to retrieve to the controller and then zos\_copy or zos\_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + When packing and using \ :literal:`use\_adrdssu`\ flag the module will take up to two times the space indicated in \ :literal:`dest\_data\_set`\ . - tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. + tar, zip, bz2 and pax are archived using python \ :literal:`tarfile`\ library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. 
@@ -416,27 +416,27 @@ Return Values state - The state of the input ``src``. + The state of the input \ :literal:`src`\ . - ``absent`` when the source files or data sets were removed. + \ :literal:`absent`\ when the source files or data sets were removed. - ``present`` when the source files or data sets were not removed. + \ :literal:`present`\ when the source files or data sets were not removed. - ``incomplete`` when ``remove`` was true and the source files or data sets were not removed. + \ :literal:`incomplete`\ when \ :literal:`remove`\ was true and the source files or data sets were not removed. | **returned**: always | **type**: str dest_state - The state of the *dest* file or data set. + The state of the \ :emphasis:`dest`\ file or data set. - ``absent`` when the file does not exist. + \ :literal:`absent`\ when the file does not exist. - ``archive`` when the file is an archive. + \ :literal:`archive`\ when the file is an archive. - ``compress`` when the file is compressed, but not an archive. + \ :literal:`compress`\ when the file is compressed, but not an archive. - ``incomplete`` when the file is an archive, but some files under *src* were not found. + \ :literal:`incomplete`\ when the file is an archive, but some files under \ :emphasis:`src`\ were not found. | **returned**: success | **type**: str @@ -454,7 +454,7 @@ archived | **type**: list arcroot - If ``src`` is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. + If \ :literal:`src`\ is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. 
| **returned**: always | **type**: str diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index d70efc7a1..6833279fa 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -47,34 +47,34 @@ data_sets include - When *operation=backup*, specifies a list of data sets or data set patterns to include in the backup. + When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to include in the backup. - When *operation=restore*, specifies a list of data sets or data set patterns to include when restoring from a backup. + When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to include when restoring from a backup. - The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. + The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. - When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark ``?`` or percent sign ``%`` matches a single character. + A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. 
| **required**: False
 | **type**: raw

 exclude
- When *operation=backup*, specifies a list of data sets or data set patterns to exclude from the backup.
+ When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to exclude from the backup.

- When *operation=restore*, specifies a list of data sets or data set patterns to exclude when restoring from a backup.
+ When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to exclude when restoring from a backup.

- The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified."
+ The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified.

- When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process.
+ When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process.

 Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters.

- A question mark ``?`` or percent sign ``%`` matches a single character.
+ A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character.

 | **required**: False
 | **type**: raw
@@ -84,22 +84,22 @@ data_sets

 volume
 This applies to both data set restores and volume restores.

- When *operation=backup* and *data_sets* are provided, specifies the volume that contains the data sets to backup.
+ When \ :emphasis:`operation=backup`\ and \ :emphasis:`data\_sets`\ are provided, specifies the volume that contains the data sets to backup.
- When *operation=restore*, specifies the volume the backup should be restored to. + When \ :emphasis:`operation=restore`\ , specifies the volume the backup should be restored to. - *volume* is required when restoring a full volume backup. + \ :emphasis:`volume`\ is required when restoring a full volume backup. | **required**: False | **type**: str full_volume - When *operation=backup* and *full_volume=True*, specifies that the entire volume provided to *volume* should be backed up. + When \ :emphasis:`operation=backup`\ and \ :emphasis:`full\_volume=True`\ , specifies that the entire volume provided to \ :emphasis:`volume`\ should be backed up. - When *operation=restore* and *full_volume=True*, specifies that the volume should be restored (default is dataset). + When \ :emphasis:`operation=restore`\ and \ :emphasis:`full\_volume=True`\ , specifies that the volume should be restored (default is dataset). - *volume* must be provided when *full_volume=True*. + \ :emphasis:`volume`\ must be provided when \ :emphasis:`full\_volume=True`\ . | **required**: False | **type**: bool @@ -109,18 +109,18 @@ full_volume temp_volume Specifies a particular volume on which the temporary data sets should be created during the backup and restore process. - When *operation=backup* and *backup_name* is a data set, specifies the volume the backup should be placed in. + When \ :emphasis:`operation=backup`\ and \ :emphasis:`backup\_name`\ is a data set, specifies the volume the backup should be placed in. | **required**: False | **type**: str backup_name - When *operation=backup*, the destination data set or UNIX file to hold the backup. + When \ :emphasis:`operation=backup`\ , the destination data set or UNIX file to hold the backup. - When *operation=restore*, the destination data set or UNIX file backup to restore. + When \ :emphasis:`operation=restore`\ , the destination data set or UNIX file backup to restore. - There are no enforced conventions for backup names. 
However, using a common extension like ``.dzp`` for UNIX files and ``.DZP`` for data sets will improve readability. + There are no enforced conventions for backup names. However, using a common extension like \ :literal:`.dzp`\ for UNIX files and \ :literal:`.DZP`\ for data sets will improve readability. | **required**: True | **type**: str @@ -135,9 +135,9 @@ recover overwrite - When *operation=backup*, specifies if an existing data set or UNIX file matching *backup_name* should be deleted. + When \ :emphasis:`operation=backup`\ , specifies if an existing data set or UNIX file matching \ :emphasis:`backup\_name`\ should be deleted. - When *operation=restore*, specifies if the module should overwrite existing data sets with matching name on the target device. + When \ :emphasis:`operation=restore`\ , specifies if the module should overwrite existing data sets with matching name on the target device. | **required**: False | **type**: bool @@ -145,35 +145,35 @@ overwrite sms_storage_class - When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. + When \ :emphasis:`operation=restore`\ , specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. - When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. + When \ :emphasis:`operation=backup`\ , specifies the storage class to use for temporary data sets created during backup process. - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. 
| **required**: False | **type**: str sms_management_class - When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. + When \ :emphasis:`operation=restore`\ , specifies the management class to use. The management class will also be used for temporary data sets created during restore process. - When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. + When \ :emphasis:`operation=backup`\ , specifies the management class to use for temporary data sets created during backup process. - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str space - If *operation=backup*, specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. + If \ :emphasis:`operation=backup`\ , specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. - If *operation=restore*, specifies the amount of space to allocate for data sets temporarily created during the restore process. + If \ :emphasis:`operation=restore`\ , specifies the amount of space to allocate for data sets temporarily created during the restore process. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . 
- When *full_volume=True*, *space* defaults to ``1``, otherwise default is ``25`` + When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space`\ defaults to \ :literal:`1`\ , otherwise default is \ :literal:`25`\ | **required**: False | **type**: int @@ -182,13 +182,13 @@ space space_type The unit of measurement to use when defining data set space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . - When *full_volume=True*, *space_type* defaults to ``G``, otherwise default is ``M`` + When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space\_type`\ defaults to \ :literal:`g`\ , otherwise default is \ :literal:`m`\ | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk hlq @@ -203,7 +203,7 @@ hlq tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup data sets. - The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -251,7 +251,7 @@ Examples include: user.** backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup all datasets matching the pattern USER.** that are present on the volume MYVOL1 to data set MY.BACKUP.DZP, @@ -263,7 +263,7 @@ Examples volume: MYVOL1 backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup an entire volume, MYVOL1, to the UNIX file /tmp/temp_backup.dzp, allocate 1GB for data sets used in backup process. @@ -273,7 +273,7 @@ Examples volume: MYVOL1 full_volume: yes space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. 
Use z/OS username as new HLQ. @@ -317,7 +317,7 @@ Examples full_volume: yes backup_name: MY.BACKUP.DZP space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Specify DB2SMS10 for the SMS storage and management classes to use for the restored diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index f3eef5967..8cd6f756c 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -38,9 +38,9 @@ src state - Whether the block should be inserted or replaced using *state=present*. + Whether the block should be inserted or replaced using \ :emphasis:`state=present`\ . - Whether the block should be removed using *state=absent*. + Whether the block should be removed using \ :emphasis:`state=absent`\ . | **required**: False | **type**: str @@ -51,9 +51,9 @@ state marker The marker line template. - ``{mark}`` will be replaced with the values ``in marker_begin`` (default="BEGIN") and ``marker_end`` (default="END"). + \ :literal:`{mark}`\ will be replaced with the values \ :literal:`in marker\_begin`\ (default="BEGIN") and \ :literal:`marker\_end`\ (default="END"). - Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. | **required**: False | **type**: str @@ -63,7 +63,7 @@ marker block The text to insert inside the marker lines. - Multi-line can be separated by '\n'. + Multi-line can be separated by '\\n'. Any double-quotation marks will be removed. @@ -74,11 +74,11 @@ block insertafter If specified, the block will be inserted after the last match of the specified regular expression. - A special value ``EOF`` for inserting a block at the end of the file is available. 
+ A special value \ :literal:`EOF`\ for inserting a block at the end of the file is available. - If a specified regular expression has no matches, ``EOF`` will be used instead. + If a specified regular expression has no matches, \ :literal:`EOF`\ will be used instead. - Choices are EOF or '*regex*'. + Choices are EOF or '\*regex\*'. Default is EOF. @@ -89,18 +89,18 @@ insertafter insertbefore If specified, the block will be inserted before the last match of specified regular expression. - A special value ``BOF`` for inserting the block at the beginning of the file is available. + A special value \ :literal:`BOF`\ for inserting the block at the beginning of the file is available. If a specified regular expression has no matches, the block will be inserted at the end of the file. - Choices are BOF or '*regex*'. + Choices are BOF or '\*regex\*'. | **required**: False | **type**: str marker_begin - This will be inserted at ``{mark}`` in the opening ansible block marker. + This will be inserted at \ :literal:`{mark}`\ in the opening ansible block marker. | **required**: False | **type**: str @@ -108,7 +108,7 @@ marker_begin marker_end - This will be inserted at ``{mark}`` in the closing ansible block marker. + This will be inserted at \ :literal:`{mark}`\ in the closing ansible block marker. | **required**: False | **type**: str @@ -116,9 +116,9 @@ marker_end backup - Specifies whether a backup of destination should be created before editing the source *src*. + Specifies whether a backup of destination should be created before editing the source \ :emphasis:`src`\ . - When set to ``true``, the module creates a backup file or data set. + When set to \ :literal:`true`\ , the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -130,15 +130,15 @@ backup backup_name Specify the USS file name or data set name for the destination backup. 
- If the source *src* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name name must be an MVS data set name, and the dataset must not be preallocated. + If the source is an MVS data set, the backup\_name name must be an MVS data set name, and the dataset must not be preallocated. - If the backup_name is not provided, the default backup_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default backup\_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. - If *src* is a data set member and backup_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If \ :emphasis:`src`\ is a data set member and backup\_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str @@ -147,14 +147,14 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. 
+ The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str encoding - The character set of the source *src*. `zos_blockinfile <./zos_blockinfile.html>`_ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source \ :emphasis:`src`\ . \ `zos\_blockinfile <./zos_blockinfile.html>`__\ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -168,7 +168,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . | **required**: False | **type**: bool @@ -290,13 +290,13 @@ Notes .. note:: It is the playbook author or user's responsibility to avoid files that should not be encoded, such as binary files. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. - All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. + All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. 
The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets.

- For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_.
+ For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ .

- When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration.
+ When using \`\`with\_\*\`\` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration.

- When more then one block should be handled in a file you must change the *marker* per task.
+ When more than one block should be handled in a file you must change the \ :emphasis:`marker`\ per task.



@@ -315,7 +315,7 @@ Return Values

changed
- Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`.
+ Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`.

 | **returned**: success
 | **type**: bool
diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst
index 00e274b00..5ea5bf3ef 100644
--- a/docs/source/modules/zos_copy.rst
+++ b/docs/source/modules/zos_copy.rst
@@ -16,7 +16,7 @@ zos_copy -- Copy data to z/OS
Synopsis
--------
-- The `zos_copy <./zos_copy.html>`_ module copies a file or data set from a local or a remote machine to a location on the remote machine.
+- The \ `zos\_copy <./zos_copy.html>`__\ module copies a file or data set from a local or a remote machine to a location on the remote machine.



@@ -27,17 +27,17 @@ Parameters


asa_text
- If set to ``true``, indicates that either ``src`` or ``dest`` or both contain ASA control characters.
+ If set to \ :literal:`true`\ , indicates that either \ :literal:`src`\ or \ :literal:`dest`\ or both contain ASA control characters. - When ``src`` is a USS file and ``dest`` is a data set, the copy will preserve ASA control characters in the destination. + When \ :literal:`src`\ is a USS file and \ :literal:`dest`\ is a data set, the copy will preserve ASA control characters in the destination. - When ``src`` is a data set containing ASA control characters and ``dest`` is a USS file, the copy will put all control characters as plain text in the destination. + When \ :literal:`src`\ is a data set containing ASA control characters and \ :literal:`dest`\ is a USS file, the copy will put all control characters as plain text in the destination. - If ``dest`` is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). + If \ :literal:`dest`\ is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). - If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. + If neither \ :literal:`src`\ or \ :literal:`dest`\ have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + This option is only valid for text files. If \ :literal:`is\_binary`\ is \ :literal:`true`\ or \ :literal:`executable`\ is \ :literal:`true`\ as well, the module will fail. | **required**: False | **type**: bool @@ -47,7 +47,7 @@ asa_text backup Specifies whether a backup of the destination should be created before copying data. - When set to ``true``, the module creates a backup file or data set. + When set to \ :literal:`true`\ , the module creates a backup file or data set. 
The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -59,24 +59,24 @@ backup backup_name Specify a unique USS file name or data set name for the destination backup. - If the destination ``dest`` is a USS file or path, the ``backup_name`` must be an absolute path name. + If the destination \ :literal:`dest`\ is a USS file or path, the \ :literal:`backup\_name`\ must be an absolute path name. - If the destination is an MVS data set name, the ``backup_name`` provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. + If the destination is an MVS data set name, the \ :literal:`backup\_name`\ provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. - If the ``backup_name`` is not provided, the default ``backup_name`` will be used. If the ``dest`` is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the ``dest`` is an MVS data set, it will be a data set with a randomly generated name. + If the \ :literal:`backup\_name`\ is not provided, the default \ :literal:`backup\_name`\ will be used. If the \ :literal:`dest`\ is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the \ :literal:`dest`\ is an MVS data set, it will be a data set with a randomly generated name. - If ``dest`` is a data set member and ``backup_name`` is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. 
+ If \ :literal:`dest`\ is a data set member and \ :literal:`backup\_name`\ is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str content - When used instead of ``src``, sets the contents of a file or data set directly to the specified value. + When used instead of \ :literal:`src`\ , sets the contents of a file or data set directly to the specified value. - Works only when ``dest`` is a USS file, sequential data set, or a partitioned data set member. + Works only when \ :literal:`dest`\ is a USS file, sequential data set, or a partitioned data set member. - If ``dest`` is a directory, then content will be copied to ``/path/to/dest/inline_copy``. + If \ :literal:`dest`\ is a directory, then content will be copied to \ :literal:`/path/to/dest/inline\_copy`\ . | **required**: False | **type**: str @@ -85,27 +85,27 @@ content dest The remote absolute path or data set where the content should be copied to. - ``dest`` can be a USS file, directory or MVS data set name. + \ :literal:`dest`\ can be a USS file, directory or MVS data set name. - If ``dest`` has missing parent directories, they will be created. + If \ :literal:`dest`\ has missing parent directories, they will be created. - If ``dest`` is a nonexistent USS file, it will be created. + If \ :literal:`dest`\ is a nonexistent USS file, it will be created. - If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the copy will fail. + If \ :literal:`dest`\ is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. 
If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail. - If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. + If \ :literal:`dest`\ is a nonexistent data set, it will be created following the process outlined here and in the \ :literal:`volume`\ option. - If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + If \ :literal:`dest`\ is a nonexistent data set, the attributes assigned will depend on the type of \ :literal:`src`\ . If \ :literal:`src`\ is a USS file, \ :literal:`dest`\ will have a Fixed Block (FB) record format and the remaining attributes will be computed. If \ :emphasis:`is\_binary=true`\ , \ :literal:`dest`\ will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If \ :emphasis:`executable=true`\ ,\ :literal:`dest`\ will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. - When ``dest`` is a data set, precedence rules apply. If ``dest_data_set`` is set, this will take precedence over an existing data set. If ``dest`` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. 
Lastly, if no precendent rule has been exercised, ``dest`` will be created with the same attributes of ``src``. + When \ :literal:`dest`\ is a data set, precedence rules apply. If \ :literal:`dest\_data\_set`\ is set, this will take precedence over an existing data set. If \ :literal:`dest`\ is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precedent rule has been exercised, \ :literal:`dest`\ will be created with the same attributes of \ :literal:`src`\ . - When the ``dest`` is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) ``dest`` will be deleted and recreated following the process outlined in the ``volume`` option. + When the \ :literal:`dest`\ is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) \ :literal:`dest`\ will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. - When the ``dest`` is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the ``volume`` option. + When the \ :literal:`dest`\ is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. - When ``dest`` is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the ``volume`` option. + When \ :literal:`dest`\ is an existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. - When ``dest`` is a data set, you can override storage management rules by specifying ``volume`` if the storage class being used has GUARANTEED_SPACE=YES specified, otherwise, the allocation will fail. 
See ``volume`` for more volume related processes. + When \ :literal:`dest`\ is a data set, you can override storage management rules by specifying \ :literal:`volume`\ if the storage class being used has GUARANTEED\_SPACE=YES specified, otherwise, the allocation will fail. See \ :literal:`volume`\ for more volume related processes. | **required**: True | **type**: str @@ -114,9 +114,9 @@ dest encoding Specifies which encodings the destination file or data set should be converted from and to. - If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. + If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - Only valid if ``is_binary`` is false. + Only valid if \ :literal:`is\_binary`\ is false. | **required**: False | **type**: dict @@ -132,7 +132,7 @@ encoding to The encoding to be converted to - | **required**: True + | **required**: False | **type**: str @@ -140,22 +140,22 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str force - If set to ``true`` and the remote file or data set ``dest`` is empty, the ``dest`` will be reused. + If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is empty, the \ :literal:`dest`\ will be reused. 
- If set to ``true`` and the remote file or data set ``dest`` is NOT empty, the ``dest`` will be deleted and recreated with the ``src`` data set attributes, otherwise it will be recreated with the ``dest`` data set attributes. + If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is NOT empty, the \ :literal:`dest`\ will be deleted and recreated with the \ :literal:`src`\ data set attributes, otherwise it will be recreated with the \ :literal:`dest`\ data set attributes. - To backup data before any deletion, see parameters ``backup`` and ``backup_name``. + To backup data before any deletion, see parameters \ :literal:`backup`\ and \ :literal:`backup\_name`\ . - If set to ``false``, the file or data set will only be copied if the destination does not exist. + If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. - If set to ``false`` and destination exists, the module exits with a note to the user. + If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -163,11 +163,11 @@ force force_lock - By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. + By default, when \ :literal:`dest`\ is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use \ :literal:`force\_lock`\ to bypass this check and continue with copy. - If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. + If set to \ :literal:`true`\ and destination is a MVS data set opened by another process then zos\_copy will try to copy using DISP=SHR. - Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. 
+ Using \ :literal:`force\_lock`\ uses operations that are subject to race conditions and can lead to data loss, use with caution. If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. @@ -177,9 +177,9 @@ force_lock ignore_sftp_stderr - During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. + During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . | **required**: False | **type**: bool @@ -187,11 +187,11 @@ ignore_sftp_stderr is_binary - If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. + If set to \ :literal:`true`\ , indicates that the file or data set to be copied is a binary file or data set. - When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. 
+ When \ :emphasis:`is\_binary=true`\ , no encoding conversion is applied to the content, all content transferred retains the original state. - Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. + Use \ :emphasis:`is\_binary=true`\ when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -199,15 +199,15 @@ is_binary executable - If set to ``true``, indicates that the file or library to be copied is an executable. + If set to \ :literal:`true`\ , indicates that the file or library to be copied is an executable. - If the ``src`` executable has an alias, the alias information is also copied. If the ``dest`` is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. + If the \ :literal:`src`\ executable has an alias, the alias information is also copied. If the \ :literal:`dest`\ is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. - If *executable=true*, and ``dest`` is a data set, it must be a PDS or PDSE (library). + If \ :emphasis:`executable=true`\ , and \ :literal:`dest`\ is a data set, it must be a PDS or PDSE (library). - If ``dest`` is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. + If \ :literal:`dest`\ is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. - If ``dest`` is a file, execute permission for the user will be added to the file (``u+x``). + If \ :literal:`dest`\ is a file, execute permission for the user will be added to the file (\`\`u+x\`\`). 
| **required**: False | **type**: bool @@ -215,9 +215,9 @@ executable aliases - If set to ``true``, indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + If set to \ :literal:`true`\ , indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. - Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when ``executable=True`` and ``dest`` is a USS file or directory, this option will be ignored. + Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when \ :literal:`executable=True`\ and \ :literal:`dest`\ is a USS file or directory, this option will be ignored. Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. @@ -234,25 +234,47 @@ local_follow | **default**: True +group + Name of the group that will own the file system objects. + + When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. + + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + + | **required**: False + | **type**: str + + mode The permission of the destination file or directory. - If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. 
Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + + The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. + + | **required**: False + | **type**: str + + +owner + Name of the user that should own the filesystem object, as would be passed to the chown command. - The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. + When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - *mode=preserve* means that the file will be given the same permissions as the source file. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str remote_src - If set to ``false``, the module searches for ``src`` at the local machine. + If set to \ :literal:`false`\ , the module searches for \ :literal:`src`\ at the local machine. - If set to ``true``, the module goes to the remote/target machine for ``src``. + If set to \ :literal:`true`\ , the module goes to the remote/target machine for \ :literal:`src`\ . | **required**: False | **type**: bool @@ -262,23 +284,23 @@ remote_src src Path to a file/directory or name of a data set to copy to remote z/OS system. 
- If ``remote_src`` is true, then ``src`` must be the path to a Unix System Services (USS) file, name of a data set, or data set member. + If \ :literal:`remote\_src`\ is true, then \ :literal:`src`\ must be the path to a Unix System Services (USS) file, name of a data set, or data set member. - If ``src`` is a local path or a USS path, it can be absolute or relative. + If \ :literal:`src`\ is a local path or a USS path, it can be absolute or relative. - If ``src`` is a directory, ``dest`` must be a partitioned data set or a USS directory. + If \ :literal:`src`\ is a directory, \ :literal:`dest`\ must be a partitioned data set or a USS directory. - If ``src`` is a file and ``dest`` ends with "/" or is a directory, the file is copied to the directory with the same filename as ``src``. + If \ :literal:`src`\ is a file and \ :literal:`dest`\ ends with "/" or is a directory, the file is copied to the directory with the same filename as \ :literal:`src`\ . - If ``src`` is a directory and ends with "/", the contents of it will be copied into the root of ``dest``. If it doesn't end with "/", the directory itself will be copied. + If \ :literal:`src`\ is a directory and ends with "/", the contents of it will be copied into the root of \ :literal:`dest`\ . If it doesn't end with "/", the directory itself will be copied. - If ``src`` is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. + If \ :literal:`src`\ is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. - If ``src`` is a VSAM data set, ``dest`` must also be a VSAM. + If \ :literal:`src`\ is a VSAM data set, \ :literal:`dest`\ must also be a VSAM. Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. - Required unless using ``content``. + Required unless using \ :literal:`content`\ . 
| **required**: False | **type**: str @@ -295,22 +317,22 @@ validate volume - If ``dest`` does not exist, specify which volume ``dest`` should be allocated to. + If \ :literal:`dest`\ does not exist, specify which volume \ :literal:`dest`\ should be allocated to. Only valid when the destination is an MVS data set. The volume must already be present on the device. - If no volume is specified, storage management rules will be used to determine the volume where ``dest`` will be allocated. + If no volume is specified, storage management rules will be used to determine the volume where \ :literal:`dest`\ will be allocated. - If the storage administrator has specified a system default unit name and you do not set a ``volume`` name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. + If the storage administrator has specified a system default unit name and you do not set a \ :literal:`volume`\ name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. | **required**: False | **type**: str dest_data_set - Data set attributes to customize a ``dest`` data set to be copied into. + Data set attributes to customize a \ :literal:`dest`\ data set to be copied into. | **required**: False | **type**: dict @@ -321,22 +343,22 @@ dest_data_set | **required**: True | **type**: str - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC, LIBRARY + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, member, basic, library space_primary - If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. 
- The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int space_secondary - If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -345,21 +367,21 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk record_format - If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str - | **choices**: FB, VB, FBA, VBA, U + | **choices**: fb, vb, fba, vba, u record_length @@ -390,9 +412,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -401,9 +423,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. 
+ \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -450,13 +472,13 @@ dest_data_set use_template - Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when ``src`` is a local file or directory. + Only valid when \ :literal:`src`\ is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. 
You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ | **required**: False | **type**: bool @@ -466,9 +488,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . - These options are ignored unless ``use_template`` is true. + These options are ignored unless \ :literal:`use\_template`\ is true. | **required**: False | **type**: dict @@ -547,7 +569,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -743,11 +765,11 @@ Examples remote_src: true volume: '222222' dest_data_set: - type: SEQ + type: seq space_primary: 10 space_secondary: 3 - space_type: K - record_format: VB + space_type: k + record_format: vb record_length: 150 - name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL @@ -781,17 +803,17 @@ Notes .. note:: Destination data sets are assumed to be in catalog. When trying to copy to an uncataloged data set, the module assumes that the data set does not exist and will create it. 
- Destination will be backed up if either ``backup`` is ``true`` or ``backup_name`` is provided. If ``backup`` is ``false`` but ``backup_name`` is provided, task will fail. + Destination will be backed up if either \ :literal:`backup`\ is \ :literal:`true`\ or \ :literal:`backup\_name`\ is provided. If \ :literal:`backup`\ is \ :literal:`false`\ but \ :literal:`backup\_name`\ is provided, task will fail. When copying local files or directories, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file or directory being copied. Temporary files will always be deleted, regardless of success or failure of the copy task. VSAM data sets can only be copied to other VSAM data sets. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. 
+ Beginning in version 1.8.x, zos\_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option \ :literal:`executable`\ that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos\_copy.html) error. @@ -846,12 +868,12 @@ destination_attributes { "block_size": 32760, - "record_format": "FB", + "record_format": "fb", "record_length": 45, "space_primary": 2, "space_secondary": 1, - "space_type": "K", - "type": "PDSE" + "space_type": "k", + "type": "pdse" } block_size @@ -864,7 +886,7 @@ destination_attributes Record format of the dataset. | **type**: str - | **sample**: FB + | **sample**: fb record_length Record length of the dataset. @@ -888,17 +910,17 @@ destination_attributes Unit of measurement for space. | **type**: str - | **sample**: K + | **sample**: k type Type of dataset allocated. | **type**: str - | **sample**: PDSE + | **sample**: pdse checksum - SHA256 checksum of the file after running zos_copy. + SHA256 checksum of the file after running zos\_copy. | **returned**: When ``validate=true`` and if ``dest`` is USS | **type**: str diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 0ea34875f..3300c7d40 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -28,11 +28,11 @@ Parameters name - The name of the data set being managed. (e.g ``USER.TEST``) + The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) - If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runner's username. - Required if *type=MEMBER* or *state!=present* and not using *batch*. 
+ Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ and not using \ :emphasis:`batch`\ . | **required**: False | **type**: str @@ -41,49 +41,49 @@ name state The final state desired for specified data set. - If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . 
- If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . - If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. 
+ If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. 
+ If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. + If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . 
| **required**: False @@ -93,22 +93,22 @@ state type - The data set type to be used when creating a data set. (e.g ``pdse``) + The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ). - ``MEMBER`` expects to be used with an existing partitioned data set. + \ :literal:`member`\ expects to be used with an existing partitioned data set. Choices are case-sensitive. | **required**: False | **type**: str - | **default**: PDS - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, LIBRARY, BASIC, LARGE, MEMBER, HFS, ZFS + | **default**: pds + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -118,7 +118,7 @@ space_primary space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -128,25 +128,25 @@ space_secondary space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **default**: M - | **choices**: K, M, G, CYL, TRK + | **default**: m + | **choices**: k, m, g, cyl, trk record_format - The format of the data set. (e.g ``FB``) + The format of the data set. (e.g \ :literal:`FB`\ ) Choices are case-sensitive. - When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. 
+ When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . | **required**: False | **type**: str - | **default**: FB - | **choices**: FB, VB, FBA, VBA, U, F + | **default**: fb + | **choices**: fb, vb, fba, vba, u, f sms_storage_class @@ -216,9 +216,9 @@ directory_blocks key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -227,28 +227,28 @@ key_offset key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int volumes - If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. + If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. - If creating a data set, *volumes* specifies the volume(s) where the data set should be created. + If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. - If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. 
+ If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. - If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - *volumes* is required when *state=cataloged*. + \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . Accepts a string when using a single volume and a list of strings when using multiple. @@ -257,12 +257,12 @@ volumes replace - When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. + When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If *replace=True*, all data in the original data set will be lost. + If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. | **required**: False | **type**: bool @@ -272,7 +272,7 @@ replace tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. 
- The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -283,9 +283,9 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . - The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . | **required**: False | **type**: bool @@ -301,11 +301,11 @@ batch name - The name of the data set being managed. (e.g ``USER.TEST``) + The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) - If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if *type=MEMBER* or *state!=present* + Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ | **required**: False | **type**: str @@ -314,49 +314,49 @@ batch state The final state desired for specified data set. - If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . 
- If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . 
If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ .
 
     - If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*.
+    If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ .
 
     - If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*.
+    If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ .
 
     - If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*.
+ If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. 
If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. + If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . | **required**: False @@ -366,22 +366,22 @@ batch type - The data set type to be used when creating a data set. (e.g ``PDSE``) + The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ) - ``MEMBER`` expects to be used with an existing partitioned data set. + \ :literal:`member`\ expects to be used with an existing partitioned data set. Choices are case-sensitive. 
| **required**: False | **type**: str - | **default**: PDS - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, LIBRARY, BASIC, LARGE, MEMBER, HFS, ZFS + | **default**: pds + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -391,7 +391,7 @@ batch space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -401,25 +401,25 @@ batch space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **default**: M - | **choices**: K, M, G, CYL, TRK + | **default**: m + | **choices**: k, m, g, cyl, trk record_format - The format of the data set. (e.g ``FB``) + The format of the data set. (e.g \ :literal:`FB`\ ) Choices are case-sensitive. - When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. + When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . 
| **required**: False | **type**: str - | **default**: FB - | **choices**: FB, VB, FBA, VBA, U, F + | **default**: fb + | **choices**: fb, vb, fba, vba, u, f sms_storage_class @@ -489,9 +489,9 @@ batch key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -500,28 +500,28 @@ batch key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int volumes - If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. + If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. - If creating a data set, *volumes* specifies the volume(s) where the data set should be created. + If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. - If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. 
- If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - *volumes* is required when *state=cataloged*. + \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . Accepts a string when using a single volume and a list of strings when using multiple. @@ -530,12 +530,12 @@ batch replace - When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. + When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If *replace=True*, all data in the original data set will be lost. + If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. | **required**: False | **type**: bool @@ -547,9 +547,9 @@ batch This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . 
- The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . | **required**: False | **type**: bool @@ -568,7 +568,7 @@ Examples - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: SEQ + type: seq state: present - name: Create a PDS data set if it does not exist @@ -576,27 +576,27 @@ Examples name: someds.name.here type: pds space_primary: 5 - space_type: M - record_format: FBA + space_type: m + record_format: fba record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. 
zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 volumes: "222222" replace: yes @@ -604,19 +604,19 @@ Examples - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: ESDS + type: esds - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: KSDS + type: ksds key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: RRDS + type: rrds sms_storage_class: mydata - name: Delete a data set if it exists @@ -633,43 +633,43 @@ Examples - name: Write a member to an existing PDS; replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member replace: yes - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member - name: Remove a member from an existing PDS zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member - name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member force: yes - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - name: someds.name.here1 - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: FB + space_type: m + record_format: fb replace: yes - name: someds.name.here1(member1) - type: MEMBER + type: member - name: someds.name.here2(member1) - type: MEMBER + type: member replace: yes - name: someds.name.here2(member2) - type: MEMBER + type: member - name: Catalog a data set present on volume 222222 if it is uncataloged. 
zos_data_set: diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 4c2294e24..68089a3a6 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -37,7 +37,7 @@ encoding from - The character set of the source *src*. + The character set of the source \ :emphasis:`src`\ . | **required**: False | **type**: str @@ -45,7 +45,7 @@ encoding to - The destination *dest* character set for the output to be written as. + The destination \ :emphasis:`dest`\ character set for the output to be written as. | **required**: False | **type**: str @@ -58,7 +58,7 @@ src The USS path or file must be an absolute pathname. - If *src* is a USS directory, all files will be encoded. + If \ :emphasis:`src`\ is a USS directory, all files will be encoded. | **required**: True | **type**: str @@ -67,11 +67,11 @@ src dest The location where the converted characters are output. - The destination *dest* can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). + The destination \ :emphasis:`dest`\ can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). - If the length of the PDSE member name used in *dest* is greater than 8 characters, the member name will be truncated when written out. + If the length of the PDSE member name used in \ :emphasis:`dest`\ is greater than 8 characters, the member name will be truncated when written out. - If *dest* is not specified, the *src* will be used as the destination and will overwrite the *src* with the character set in the option *to_encoding*. + If \ :emphasis:`dest`\ is not specified, the \ :emphasis:`src`\ will be used as the destination and will overwrite the \ :emphasis:`src`\ with the character set in the option \ :emphasis:`to\_encoding`\ . The USS file or path must be an absolute pathname. 
@@ -80,9 +80,9 @@ dest backup - Creates a backup file or backup data set for *dest*, including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for \ :emphasis:`dest`\ , including the timestamp information to ensure that you retrieve the original file. - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . | **required**: False | **type**: bool @@ -92,13 +92,13 @@ backup backup_name Specify the USS file name or data set name for the dest backup. - If dest is a USS file or path, *backup_name* must be a file or path name, and the USS path or file must be an absolute pathname. + If dest is a USS file or path, \ :emphasis:`backup\_name`\ must be a file or path name, and the USS path or file must be an absolute pathname. - If dest is an MVS data set, the *backup_name* must be an MVS data set name. + If dest is an MVS data set, the \ :emphasis:`backup\_name`\ must be an MVS data set name. - If *backup_name* is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. + If \ :emphasis:`backup\_name`\ is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file\_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. - ``backup_name`` will be returned on either success or failure of module execution such that data can be retrieved. 
+ \ :literal:`backup\_name`\ will be returned on either success or failure of module execution such that data can be retrieved. | **required**: False | **type**: str @@ -107,7 +107,7 @@ backup_name backup_compress Determines if backups to USS files or paths should be compressed. - *backup_compress* is only used when *backup=true*. + \ :emphasis:`backup\_compress`\ is only used when \ :emphasis:`backup=true`\ . | **required**: False | **type**: bool @@ -117,7 +117,7 @@ backup_compress tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -265,7 +265,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . @@ -278,7 +278,7 @@ Return Values src - The location of the input characters identified in option *src*. + The location of the input characters identified in option \ :emphasis:`src`\ . | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 87a50a65a..7cdcabbd5 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -20,7 +20,7 @@ Synopsis - When fetching a sequential data set, the destination file name will be the same as the data set name. 
- When fetching a PDS or PDSE, the destination will be a directory with the same name as the PDS or PDSE. - When fetching a PDS/PDSE member, destination will be a file. -- Files that already exist at ``dest`` will be overwritten if they are different than ``src``. +- Files that already exist at \ :literal:`dest`\ will be overwritten if they are different than \ :literal:`src`\ . @@ -96,7 +96,7 @@ encoding from - The character set of the source *src*. + The character set of the source \ :emphasis:`src`\ . Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -105,7 +105,7 @@ encoding to - The destination *dest* character set for the output to be written as. + The destination \ :emphasis:`dest`\ character set for the output to be written as. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -117,16 +117,16 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str ignore_sftp_stderr - During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. + During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . 
By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . | **required**: False | **type**: bool @@ -196,13 +196,13 @@ Notes .. note:: When fetching PDSE and VSAM data sets, temporary storage will be used on the remote z/OS system. After the PDSE or VSAM data set is successfully transferred, the temporary storage will be deleted. The size of the temporary storage will correspond to the size of PDSE or VSAM data set being fetched. If module execution fails, the temporary storage will be deleted. - To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the ``checksum`` parameter. + To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the \ :literal:`checksum`\ parameter. All data sets are always assumed to be cataloged. If an uncataloged data set needs to be fetched, it should be cataloged first. Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. 
+ For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -263,7 +263,7 @@ data_set_type | **sample**: PDSE note - Notice of module failure when ``fail_on_missing`` is false. + Notice of module failure when \ :literal:`fail\_on\_missing`\ is false. | **returned**: failure and fail_on_missing=false | **type**: str diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index f195b2c2c..83082b5c0 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -18,7 +18,7 @@ Synopsis -------- - Return a list of data sets based on specific criteria. - Multiple criteria can be added (AND'd) together. -- The ``zos_find`` module can only find MVS data sets. Use the `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`_ module to find USS files. +- The \ :literal:`zos\_find`\ module can only find MVS data sets. Use the \ `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`__\ module to find USS files. @@ -44,9 +44,9 @@ age age_stamp Choose the age property against which to compare age. - ``creation_date`` is the date the data set was created and ``ref_date`` is the date the data set was last referenced. + \ :literal:`creation\_date`\ is the date the data set was created and \ :literal:`ref\_date`\ is the date the data set was last referenced. - ``ref_date`` is only applicable to sequential and partitioned data sets. 
+ \ :literal:`ref\_date`\ is only applicable to sequential and partitioned data sets. | **required**: False | **type**: str @@ -80,7 +80,7 @@ patterns This parameter expects a list, which can be either comma separated or YAML. - If ``pds_patterns`` is provided, ``patterns`` must be member patterns. + If \ :literal:`pds\_patterns`\ is provided, \ :literal:`patterns`\ must be member patterns. When searching for members within a PDS/PDSE, pattern can be a regular expression. @@ -107,7 +107,7 @@ pds_patterns Required when searching for data set members. - Valid only for ``nonvsam`` resource types. Otherwise ignored. + Valid only for \ :literal:`nonvsam`\ resource types. Otherwise ignored. | **required**: False | **type**: list @@ -117,9 +117,9 @@ pds_patterns resource_type The type of resource to search. - ``nonvsam`` refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. + \ :literal:`nonvsam`\ refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. - ``cluster`` refers to a VSAM cluster. The ``data`` and ``index`` are the data and index components of a VSAM cluster. + \ :literal:`cluster`\ refers to a VSAM cluster. The \ :literal:`data`\ and \ :literal:`index`\ are the data and index components of a VSAM cluster. | **required**: False | **type**: str @@ -192,11 +192,11 @@ Notes ----- .. note:: - Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. + Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. - The `zos_find <./zos_find.html>`_ module currently does not support wildcards for high level qualifiers. For example, ``SOME.*.DATA.SET`` is a valid pattern, but ``*.DATA.SET`` is not. 
+ The \ `zos\_find <./zos_find.html>`__\ module currently does not support wildcards for high level qualifiers. For example, \ :literal:`SOME.\*.DATA.SET`\ is a valid pattern, but \ :literal:`\*.DATA.SET`\ is not. - If a data set pattern is specified as ``USER.*``, the matching data sets will have two name segments such as ``USER.ABC``, ``USER.XYZ`` etc. If a wildcard is specified as ``USER.*.ABC``, the matching data sets will have three name segments such as ``USER.XYZ.ABC``, ``USER.TEST.ABC`` etc. + If a data set pattern is specified as \ :literal:`USER.\*`\ , the matching data sets will have two name segments such as \ :literal:`USER.ABC`\ , \ :literal:`USER.XYZ`\ etc. If a wildcard is specified as \ :literal:`USER.\*.ABC`\ , the matching data sets will have three name segments such as \ :literal:`USER.XYZ.ABC`\ , \ :literal:`USER.TEST.ABC`\ etc. The time taken to execute the module is proportional to the number of data sets present on the system and how large the data sets are. diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 0247ffd96..02a56fd23 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -17,8 +17,8 @@ zos_gather_facts -- Gather z/OS system facts. Synopsis -------- - Retrieve variables from target z/OS systems. -- Variables are added to the *ansible_facts* dictionary, available to playbooks. -- Apply filters on the *gather_subset* list to reduce the variables that are added to the *ansible_facts* dictionary. +- Variables are added to the \ :emphasis:`ansible\_facts`\ dictionary, available to playbooks. +- Apply filters on the \ :emphasis:`gather\_subset`\ list to reduce the variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. 
@@ -32,7 +32,7 @@ Parameters gather_subset If specified, it will collect facts that come under the specified subset (eg. ipl will return ipl facts). Specifying subsets is recommended to reduce time in gathering facts when the facts needed are in a specific subset. - The following subsets are available ``ipl``, ``cpu``, ``sys``, and ``iodf``. Depending on the version of ZOAU, additional subsets may be available. + The following subsets are available \ :literal:`ipl`\ , \ :literal:`cpu`\ , \ :literal:`sys`\ , and \ :literal:`iodf`\ . Depending on the version of ZOAU, additional subsets may be available. | **required**: False | **type**: list @@ -41,13 +41,13 @@ gather_subset filter - Filter out facts from the *ansible_facts* dictionary. + Filter out facts from the \ :emphasis:`ansible\_facts`\ dictionary. - Uses shell-style `fnmatch <https://docs.python.org/3/library/fnmatch.html>`_ pattern matching to filter out the collected facts. + Uses shell-style \ `fnmatch <https://docs.python.org/3/library/fnmatch.html>`__\ pattern matching to filter out the collected facts. - An empty list means 'no filter', same as providing '*'. + An empty list means 'no filter', same as providing '\*'. - Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the *ansible_facts* dictionary. To restrict the facts that are collected, refer to the *gather_subset* parameter. + Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. To restrict the facts that are collected, refer to the \ :emphasis:`gather\_subset`\ parameter. 
| **required**: False | **type**: list diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index efea6ea2a..59e37aeb9 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -18,9 +18,9 @@ Synopsis -------- - Display the z/OS job output for a given criteria (Job id/Job name/owner) with/without a data definition name as a filter. - At least provide a job id/job name/owner. -- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC*" or "*". -- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP*" or "*". -- The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". +- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC\*" or "\*". +- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP\*" or "\*". +- The owner can be specific such as "IBMUSER", or one that uses a pattern like "\*". - If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. @@ -32,21 +32,21 @@ Parameters job_id - The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC*") + The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC\*") | **required**: False | **type**: str job_name - The name of the batch job. (e.g "TCPIP", "C*") + The name of the batch job. (e.g "TCPIP", "C\*") | **required**: False | **type**: str owner - The owner who ran the job. (e.g "IBMUSER", "*") + The owner who ran the job. (e.g "IBMUSER", "\*") | **required**: False | **type**: str @@ -97,7 +97,7 @@ Return Values jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. 
If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. | **returned**: success | **type**: list @@ -416,7 +416,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index ea320dfc3..e4da71341 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -17,8 +17,8 @@ zos_job_query -- Query job status Synopsis -------- - List z/OS job(s) and the current status of the job(s). -- Uses job_name to filter the jobs by the job name. -- Uses job_id to filter the jobs by the job identifier. +- Uses job\_name to filter the jobs by the job name. +- Uses job\_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. @@ -35,9 +35,9 @@ job_name A job name can be up to 8 characters long. - The *job_name* can contain include multiple wildcards. + The \ :emphasis:`job\_name`\ can include multiple wildcards. - The asterisk (`*`) wildcard will match zero or more specified characters. + The asterisk (\`\*\`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -56,13 +56,13 @@ owner job_id The job id that has been assigned to the job. - A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. + A job id must begin with \`STC\`, \`JOB\`, \`TSU\` and are followed by up to 5 digits. - When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. 
+ When a job id is greater than 99,999, the job id format will begin with \`S\`, \`J\`, \`T\` and are followed by 7 digits. - The *job_id* can contain include multiple wildcards. + The \ :emphasis:`job\_id`\ can include multiple wildcards. - The asterisk (`*`) wildcard will match zero or more specified characters. + The asterisk (\`\*\`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -122,7 +122,7 @@ changed | **type**: bool jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. | **returned**: success | **type**: list @@ -211,7 +211,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 8f4dda61b..964ab8f4b 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -42,24 +42,24 @@ src location - The JCL location. Supported choices are ``DATA_SET``, ``USS`` or ``LOCAL``. + The JCL location. Supported choices are \ :literal:`data\_set`\ , \ :literal:`uss`\ or \ :literal:`local`\ . - DATA_SET can be a PDS, PDSE, or sequential data set. + \ :literal:`data\_set`\ can be a PDS, PDSE, or sequential data set. - USS means the JCL location is located in UNIX System Services (USS). + \ :literal:`uss`\ means the JCL location is located in UNIX System Services (USS). 
- LOCAL means locally to the ansible control node. + \ :literal:`local`\ means locally to the ansible control node. | **required**: False | **type**: str - | **default**: DATA_SET - | **choices**: DATA_SET, USS, LOCAL + | **default**: data_set + | **choices**: data_set, uss, local wait_time_s - Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. + Option \ :emphasis:`wait\_time\_s`\ is the total time that module \ `zos\_job\_submit <./zos_job_submit.html>`__\ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. + \ :emphasis:`wait\_time\_s`\ is measured in seconds and must be a value greater than 0 and less than 86400. | **required**: False | **type**: int @@ -84,11 +84,11 @@ return_output volume - The volume serial (VOLSER)is where the data set resides. The option is required only when the data set is not cataloged on the system. + The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system. - When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. + When configured, the \ `zos\_job\_submit <./zos_job_submit.html>`__\ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for *location=USS* and *location=LOCAL*. + Ignored for \ :emphasis:`location=uss`\ and \ :emphasis:`location=local`\ . | **required**: False | **type**: str @@ -97,7 +97,7 @@ volume encoding Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - This option is only supported for when *location=LOCAL*. 
+ This option is only supported for when \ :emphasis:`location=local`\ . If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding. @@ -129,13 +129,13 @@ encoding use_template - Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when ``src`` is a local file or directory. + Only valid when \ :literal:`src`\ is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. 
You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ | **required**: False | **type**: bool @@ -145,9 +145,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . - These options are ignored unless ``use_template`` is true. + These options are ignored unless \ :literal:`use\_template`\ is true. | **required**: False | **type**: dict @@ -226,7 +226,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -267,22 +267,22 @@ Examples .. code-block:: yaml+jinja - - name: Submit JCL in a PDSE member + - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: USS + location: uss return_output: false - name: Convert local JCL to IBM-037 and submit the job. 
zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: LOCAL + location: local encoding: from: ISO8859-1 to: IBM-037 @@ -290,25 +290,25 @@ Examples - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: DATA_SET + location: data_set max_rc: 16 @@ -318,9 +318,9 @@ Notes ----- .. note:: - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. 
+ This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -333,7 +333,7 @@ Return Values jobs - List of jobs output. If no job status is found, this will return an empty ret_code with msg_txt explanation. + List of jobs output. If no job status is found, this will return an empty ret\_code with msg\_txt explanation. | **returned**: success | **type**: list @@ -680,25 +680,27 @@ jobs msg Job status resulting from the job submission. - Job status `ABEND` indicates the job ended abnormally. + Job status \`ABEND\` indicates the job ended abnormally. - Job status `AC` indicates the job is active, often a started task or job taking long. + Job status \`AC\` indicates the job is active, often a started task or job taking long. - Job status `CAB` indicates a converter abend. + Job status \`CAB\` indicates a converter abend. - Job status `CANCELED` indicates the job was canceled. + Job status \`CANCELED\` indicates the job was canceled. - Job status `CNV` indicates a converter error. + Job status \`CNV\` indicates a converter error. - Job status `FLU` indicates the job was flushed. + Job status \`FLU\` indicates the job was flushed. - Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + Job status \`JCLERR\` or \`JCL ERROR\` indicates the JCL has an error. - Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + Job status \`SEC\` or \`SEC ERROR\` indicates the job has encountered a security error. - Job status `SYS` indicates a system failure. 
- Job status `?` indicates status can not be determined. + Job status \`?\` indicates status can not be determined. + + Jobs where status can not be determined will result in None (NULL). | **type**: str | **sample**: AC @@ -706,18 +708,22 @@ jobs msg_code The return code from the submitted job as a string. + Jobs which have no return code will result in None (NULL), such is the case of a job that errors or is active. + | **type**: str msg_txt Returns additional information related to the submitted job. + Jobs which have no additional information will result in None (NULL). + | **type**: str | **sample**: The job JOB00551 was run with special job processing TYPRUN=SCAN. This will result in no completion, return code or job steps and changed will be false. code The return code converted to an integer value when available. - Jobs which have no return code will return NULL, such is the case of a job that errors or is active. + Jobs which have no return code will result in None (NULL), such is the case of a job that errors or is active. | **type**: int @@ -788,10 +794,3 @@ jobs | **sample**: IEBGENER -message - This option is being deprecated - - | **returned**: success - | **type**: str - | **sample**: Submit JCL operation succeeded. - diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index f7005017e..983e5ca0b 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -40,13 +40,13 @@ src regexp The regular expression to look for in every line of the USS file or data set. - For ``state=present``, the pattern to replace if found. Only the last line found will be replaced. + For \ :literal:`state=present`\ , the pattern to replace if found. Only the last line found will be replaced. - For ``state=absent``, the pattern of the line(s) to remove. + For \ :literal:`state=absent`\ , the pattern of the line(s) to remove. 
- If the regular expression is not matched, the line will be added to the USS file or data set in keeping with ``insertbefore`` or ``insertafter`` settings. + If the regular expression is not matched, the line will be added to the USS file or data set in keeping with \ :literal:`insertbefore`\ or \ :literal:`insertafter`\ settings. - When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by ``line`` to ensure idempotence. + When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by \ :literal:`line`\ to ensure idempotence. | **required**: False | **type**: str @@ -64,22 +64,22 @@ state line The line to insert/replace into the USS file or data set. - Required for ``state=present``. + Required for \ :literal:`state=present`\ . - If ``backrefs`` is set, may contain backreferences that will get expanded with the ``regexp`` capture groups if the regexp matches. + If \ :literal:`backrefs`\ is set, may contain backreferences that will get expanded with the \ :literal:`regexp`\ capture groups if the regexp matches. | **required**: False | **type**: str backrefs - Used with ``state=present``. + Used with \ :literal:`state=present`\ . - If set, ``line`` can contain backreferences (both positional and named) that will get populated if the ``regexp`` matches. + If set, \ :literal:`line`\ can contain backreferences (both positional and named) that will get populated if the \ :literal:`regexp`\ matches. - This parameter changes the operation of the module slightly; ``insertbefore`` and ``insertafter`` will be ignored, and if the ``regexp`` does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. 
+ This parameter changes the operation of the module slightly; \ :literal:`insertbefore`\ and \ :literal:`insertafter`\ will be ignored, and if the \ :literal:`regexp`\ does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. - If the ``regexp`` does match, the last matching line will be replaced by the expanded line parameter. + If the \ :literal:`regexp`\ does match, the last matching line will be replaced by the expanded line parameter. | **required**: False | **type**: bool @@ -87,23 +87,23 @@ backrefs insertafter - Used with ``state=present``. + Used with \ :literal:`state=present`\ . If specified, the line will be inserted after the last match of specified regular expression. If the first match is required, use(firstmatch=yes). - A special value is available; ``EOF`` for inserting the line at the end of the USS file or data set. + A special value is available; \ :literal:`EOF`\ for inserting the line at the end of the USS file or data set. If the specified regular expression has no matches, EOF will be used instead. - If ``insertbefore`` is set, default value ``EOF`` will be ignored. + If \ :literal:`insertbefore`\ is set, default value \ :literal:`EOF`\ will be ignored. - If regular expressions are passed to both ``regexp`` and ``insertafter``, ``insertafter`` is only honored if no match for ``regexp`` is found. + If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertafter`\ , \ :literal:`insertafter`\ is only honored if no match for \ :literal:`regexp`\ is found. - May not be used with ``backrefs`` or ``insertbefore``. + May not be used with \ :literal:`backrefs`\ or \ :literal:`insertbefore`\ . - Choices are EOF or '*regex*' + Choices are EOF or '\*regex\*' Default is EOF @@ -112,30 +112,30 @@ insertafter insertbefore - Used with ``state=present``. + Used with \ :literal:`state=present`\ . 
If specified, the line will be inserted before the last match of specified regular expression. - If the first match is required, use ``firstmatch=yes``. + If the first match is required, use \ :literal:`firstmatch=yes`\ . - A value is available; ``BOF`` for inserting the line at the beginning of the USS file or data set. + A value is available; \ :literal:`BOF`\ for inserting the line at the beginning of the USS file or data set. If the specified regular expression has no matches, the line will be inserted at the end of the USS file or data set. - If regular expressions are passed to both ``regexp`` and ``insertbefore``, ``insertbefore`` is only honored if no match for ``regexp`` is found. + If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertbefore`\ , \ :literal:`insertbefore`\ is only honored if no match for \ :literal:`regexp`\ is found. - May not be used with ``backrefs`` or ``insertafter``. + May not be used with \ :literal:`backrefs`\ or \ :literal:`insertafter`\ . - Choices are BOF or '*regex*' + Choices are BOF or '\*regex\*' | **required**: False | **type**: str backup - Creates a backup file or backup data set for *src*, including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for \ :emphasis:`src`\ , including the timestamp information to ensure that you retrieve the original file. - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -147,11 +147,11 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source *src* is a USS file or path, the backup_name must be a file or path name, and the USS file or path must be an absolute path name. 
+ If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name must be an MVS data set name. + If the source is an MVS data set, the backup\_name must be an MVS data set name. - If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -162,16 +162,16 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str firstmatch - Used with ``insertafter`` or ``insertbefore``. + Used with \ :literal:`insertafter`\ or \ :literal:`insertbefore`\ . - If set, ``insertafter`` and ``insertbefore`` will work with the first line that matches the given regular expression. + If set, \ :literal:`insertafter`\ and \ :literal:`insertbefore`\ will work with the first line that matches the given regular expression. | **required**: False | **type**: bool @@ -179,7 +179,7 @@ firstmatch encoding - The character set of the source *src*. 
`zos_lineinfile <./zos_lineinfile.html>`_ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source \ :emphasis:`src`\ . \ `zos\_lineinfile <./zos_lineinfile.html>`__\ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -193,7 +193,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . | **required**: False | **type**: bool @@ -260,7 +260,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . @@ -273,7 +273,7 @@ Return Values changed - Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. + Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`. 
| **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 42e8a8ea6..9a30c5c91 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -16,9 +16,9 @@ zos_mount -- Mount a z/OS file system. Synopsis -------- -- The module `zos_mount <./zos_mount.html>`_ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. -- The *src* data set must be unique and a Fully Qualified Name (FQN). -- The *path* will be created if needed. +- The module \ `zos\_mount <./zos_mount.html>`__\ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. +- The \ :emphasis:`src`\ data set must be unique and a Fully Qualified Name (FQN). +- The \ :emphasis:`path`\ will be created if needed. @@ -31,7 +31,7 @@ Parameters path The absolute path name onto which the file system is to be mounted. - The *path* is case sensitive and must be less than or equal 1023 characters long. + The \ :emphasis:`path`\ is case sensitive and must be less than or equal 1023 characters long. | **required**: True | **type**: str @@ -40,9 +40,9 @@ path src The name of the file system to be added to the file system hierarchy. - The file system *src* must be a data set of type *fs_type*. + The file system \ :emphasis:`src`\ must be a data set of type \ :emphasis:`fs\_type`\ . - The file system *src* data set must be cataloged. + The file system \ :emphasis:`src`\ data set must be cataloged. | **required**: True | **type**: str @@ -53,35 +53,35 @@ fs_type The physical file systems data set format to perform the logical mount. - The *fs_type* is required to be uppercase. + The \ :emphasis:`fs\_type`\ is required to be lowercase. | **required**: True | **type**: str - | **choices**: HFS, ZFS, NFS, TFS + | **choices**: hfs, zfs, nfs, tfs state The desired status of the described mount (choice). 
- If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. + If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are not in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will be updated, the device will be mounted and the module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. + If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will not be updated, the device will not be mounted and the module will complete successfully with \ :emphasis:`changed=False`\ . - If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/data_store*. The device will be unmounted and the module will complete successfully with *changed=True*. + If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are in use, the module will \ :strong:`not`\ add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and the module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/data_store*.The device will remain unchanged and the module will complete with *changed=False*. 
+ If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are not in use, the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ . The device will remain unchanged and the module will complete with \ :emphasis:`changed=False`\ . - If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/data_store* if not present. The module will complete successfully with *changed=True*. + If \ :emphasis:`state=present`\ , the module will add the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/data_store* if present. The module will complete successfully with *changed=True*. + If \ :emphasis:`state=absent`\ , the module will remove the file system entry from the provided parmlib member \ :emphasis:`persistent/data\_store`\ if present. The module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. + If \ :emphasis:`state=remounted`\ , the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and mounted, the module will complete successfully with \ :emphasis:`changed=True`\ . 
| **required**: False @@ -91,7 +91,7 @@ state persistent - Add or remove mount command entries to provided *data_store* + Add or remove mount command entries to provided \ :emphasis:`data\_store`\ | **required**: False | **type**: dict @@ -105,9 +105,9 @@ persistent backup - Creates a backup file or backup data set for *data_store*, including the timestamp information to ensure that you retrieve the original parameters defined in *data_store*. + Creates a backup file or backup data set for \ :emphasis:`data\_store`\ , including the timestamp information to ensure that you retrieve the original parameters defined in \ :emphasis:`data\_store`\ . - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -119,11 +119,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_store* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. + If the source \ :emphasis:`data\_store`\ is a USS file or path, the \ :emphasis:`backup\_name`\ name can be relative or absolute for file or path name. - If the source is an MVS data set, the backup_name must be an MVS data set name. + If the source is an MVS data set, the backup\_name must be an MVS data set name. - If the backup_name is not provided, the default *backup_name* will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default \ :emphasis:`backup\_name`\ will be used. 
If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -132,9 +132,9 @@ persistent comment - If provided, this is used as a comment that surrounds the command in the *persistent/data_store* + If provided, this is used as a comment that surrounds the command in the \ :emphasis:`persistent/data\_store`\ - Comments are used to encapsulate the *persistent/data_store* entry such that they can easily be understood and located. + Comments are used to encapsulate the \ :emphasis:`persistent/data\_store`\ entry such that they can easily be understood and located. | **required**: False | **type**: list @@ -145,29 +145,29 @@ persistent unmount_opts Describes how the unmount will be performed. - For more on coded character set identifiers, review the IBM documentation topic **UNMOUNT - Remove a file system from the file hierarchy**. + For more on coded character set identifiers, review the IBM documentation topic \ :strong:`UNMOUNT - Remove a file system from the file hierarchy`\ . | **required**: False | **type**: str - | **default**: NORMAL - | **choices**: DRAIN, FORCE, IMMEDIATE, NORMAL, REMOUNT, RESET + | **default**: normal + | **choices**: drain, force, immediate, normal, remount, reset mount_opts Options available to the mount. - If *mount_opts=RO* on a mounted/remount, mount is performed read-only. + If \ :emphasis:`mount\_opts=ro`\ on a mounted/remount, mount is performed read-only. - If *mount_opts=SAME* and (unmount_opts=REMOUNT), mount is opened in the same mode as previously opened. + If \ :emphasis:`mount\_opts=same`\ and (unmount\_opts=remount), mount is opened in the same mode as previously opened. 
- If *mount_opts=NOWAIT*, mount is performed asynchronously. + If \ :emphasis:`mount\_opts=nowait`\ , mount is performed asynchronously. - If *mount_opts=NOSECURITY*, security checks are not enforced for files in this file system. + If \ :emphasis:`mount\_opts=nosecurity`\ , security checks are not enforced for files in this file system. | **required**: False | **type**: str - | **default**: RW - | **choices**: RO, RW, SAME, NOWAIT, NOSECURITY + | **default**: rw + | **choices**: ro, rw, same, nowait, nosecurity src_params @@ -184,27 +184,27 @@ tag_untagged When the file system is unmounted, the tags are lost. - If *tag_untagged=NOTEXT* none of the untagged files in the file system are automatically converted during file reading and writing. + If \ :emphasis:`tag\_untagged=notext`\ none of the untagged files in the file system are automatically converted during file reading and writing. - If *tag_untagged=TEXT* each untagged file is implicitly marked as containing pure text data that can be converted. + If \ :emphasis:`tag\_untagged=text`\ each untagged file is implicitly marked as containing pure text data that can be converted. - If this flag is used, use of tag_ccsid is encouraged. + If this flag is used, use of tag\_ccsid is encouraged. | **required**: False | **type**: str - | **choices**: TEXT, NOTEXT + | **choices**: text, notext tag_ccsid Identifies the coded character set identifier (ccsid) to be implicitly set for the untagged file. - For more on coded character set identifiers, review the IBM documentation topic **Coded Character Sets**. + For more on coded character set identifiers, review the IBM documentation topic \ :strong:`Coded Character Sets`\ . Specified as a decimal value from 0 to 65535. However, when TEXT is specified, the value must be between 0 and 65535. The value is not checked as being valid and the corresponding code page is not checked as being installed. - Required when *tag_untagged=TEXT*. 
+ Required when \ :emphasis:`tag\_untagged=text`\ . | **required**: False | **type**: int @@ -214,10 +214,10 @@ allow_uid Specifies whether the SETUID and SETGID mode bits on an executable in this file system are considered. Also determines whether the APF extended attribute or the Program Control extended attribute is honored. - If *allow_uid=True* the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. + If \ :emphasis:`allow\_uid=True`\ the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. - If *allow_uid=False* the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. + If \ :emphasis:`allow\_uid=False`\ the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. 
+ \ :emphasis:`sysname`\ is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. | **required**: False @@ -240,23 +240,23 @@ automove These parameters apply only in a sysplex where systems are exploiting the shared file system capability. They specify what happens to the ownership of a file system when a shutdown, PFS termination, dead system takeover, or file system move occurs. The default setting is AUTOMOVE where the file system will be randomly moved to another system (no system list used). - *automove=AUTOMOVE* indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. + \ :emphasis:`automove=automove`\ indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. - *automove=NOAUTOMOVE* prevents movement of the file system's ownership in some situations. + \ :emphasis:`automove=noautomove`\ prevents movement of the file system's ownership in some situations. - *automove=UNMOUNT* allows the file system to be unmounted in some situations. + \ :emphasis:`automove=unmount`\ allows the file system to be unmounted in some situations. | **required**: False | **type**: str - | **default**: AUTOMOVE - | **choices**: AUTOMOVE, NOAUTOMOVE, UNMOUNT + | **default**: automove + | **choices**: automove, noautomove, unmount automove_list - If(automove=AUTOMOVE), this option will be checked. + If(automove=automove), this option will be checked. This specifies the list of servers to include or exclude as destinations. @@ -275,7 +275,7 @@ automove_list tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. 
+ The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -293,14 +293,14 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted - name: Unmount a filesystem. zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: unmounted unmount_opts: REMOUNT opts: same @@ -309,7 +309,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: RO @@ -317,7 +317,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -327,7 +327,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -339,7 +339,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted allow_uid: no @@ -347,7 +347,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted opts: nowait @@ -355,7 +355,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: NOSECURITY @@ -363,7 +363,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: I,DEV1,DEV2,DEV3,DEV9 @@ -372,7 +372,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7 @@ -389,7 +389,7 @@ Notes If an uncataloged data set needs to be fetched, it should be cataloged first. - Uncataloged data sets can be cataloged using the `zos_data_set <./zos_data_set.html>`_ module. 
+ Uncataloged data sets can be cataloged using the \ `zos\_data\_set <./zos_data_set.html>`__\ module. @@ -467,7 +467,7 @@ persistent | **sample**: SYS1.FILESYS(PRMAABAK) comment - The text that was used in markers around the *Persistent/data_store* entry. + The text that was used in markers around the \ :emphasis:`Persistent/data\_store`\ entry. | **returned**: always | **type**: list @@ -529,7 +529,7 @@ allow_uid true sysname - *sysname* specifies the particular system on which a mount should be performed. + \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. | **returned**: if Non-None | **type**: str diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 3ebedadd5..c0551786e 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -45,9 +45,9 @@ parm auth Determines whether this program should run with authorized privileges. - If *auth=true*, the program runs as APF authorized. + If \ :emphasis:`auth=true`\ , the program runs as APF authorized. - If *auth=false*, the program runs as unauthorized. + If \ :emphasis:`auth=false`\ , the program runs as unauthorized. | **required**: False | **type**: bool @@ -57,7 +57,7 @@ auth verbose Determines if verbose output should be returned from the underlying utility used by this module. - When *verbose=true* verbose output is returned on module failure. + When \ :emphasis:`verbose=true`\ verbose output is returned on module failure. | **required**: False | **type**: bool @@ -67,19 +67,19 @@ verbose dds The input data source. - *dds* supports 6 types of sources + \ :emphasis:`dds`\ supports 6 types of sources - 1. *dd_data_set* for data set files. + 1. \ :emphasis:`dd\_data\_set`\ for data set files. - 2. *dd_unix* for UNIX files. + 2. \ :emphasis:`dd\_unix`\ for UNIX files. - 3. *dd_input* for in-stream data set. + 3. \ :emphasis:`dd\_input`\ for in-stream data set. - 4. *dd_dummy* for no content input. + 4. 
\ :emphasis:`dd\_dummy`\ for no content input. - 5. *dd_concat* for a data set concatenation. + 5. \ :emphasis:`dd\_concat`\ for a data set concatenation. - 6. *dds* supports any combination of source types. + 6. \ :emphasis:`dds`\ supports any combination of source types. | **required**: False | **type**: list @@ -89,7 +89,7 @@ dds dd_data_set Specify a data set. - *dd_data_set* can reference an existing data set or be used to define a new data set to be created during execution. + \ :emphasis:`dd\_data\_set`\ can reference an existing data set or be used to define a new data set to be created during execution. | **required**: False | **type**: dict @@ -110,7 +110,7 @@ dds type - The data set type. Only required when *disposition=new*. + The data set type. Only required when \ :emphasis:`disposition=new`\ . Maps to DSNTYPE on z/OS. @@ -120,7 +120,7 @@ dds disposition - *disposition* indicates the status of a data set. + \ :emphasis:`disposition`\ indicates the status of a data set. Defaults to shr. @@ -130,31 +130,31 @@ dds disposition_normal - *disposition_normal* indicates what to do with the data set after a normal termination of the program. + \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after a normal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog disposition_abnormal - *disposition_abnormal* indicates what to do with the data set after an abnormal termination of the program. + \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after an abnormal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog reuse - Determines if a data set should be reused if *disposition=NEW* and if a data set with a matching name already exists. 
+ Determines if a data set should be reused if \ :emphasis:`disposition=new`\ and if a data set with a matching name already exists. - If *reuse=true*, *disposition* will be automatically switched to ``SHR``. + If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . - If *reuse=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *replace*. + Mutually exclusive with \ :emphasis:`replace`\ . - *reuse* is only considered when *disposition=NEW* + \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ | **required**: False | **type**: bool @@ -162,17 +162,17 @@ dds replace - Determines if a data set should be replaced if *disposition=NEW* and a data set with a matching name already exists. + Determines if a data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with a matching name already exists. - If *replace=true*, the original data set will be deleted, and a new data set created. + If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. - If *replace=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *reuse*. + Mutually exclusive with \ :emphasis:`reuse`\ . - *replace* is only considered when *disposition=NEW* + \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ - *replace* will result in loss of all data in the original data set unless *backup* is specified. + \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. 
| **required**: False | **type**: bool @@ -180,9 +180,9 @@ dds backup - Determines if a backup should be made of an existing data set when *disposition=NEW*, *replace=true*, and a data set with the desired name is found. + Determines if a backup should be made of an existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. - *backup* is only used when *replace=true*. + \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . | **required**: False | **type**: bool @@ -190,7 +190,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. + The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . | **required**: False | **type**: str @@ -200,9 +200,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -211,9 +211,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . 
| **required**: False | **type**: int @@ -231,7 +231,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - *sms_management_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -242,7 +242,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - *sms_storage_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -253,7 +253,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - *sms_data_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -264,7 +264,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -280,9 +280,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - *key_label* is the public name of a protected encryption key in the ICSF key repository. + \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. - *key_label* should only be provided when creating an extended format data set. + \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -304,7 +304,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. 
@@ -313,9 +313,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD1 on z/OS. @@ -339,7 +339,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. @@ -348,9 +348,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD2 on z/OS. @@ -363,7 +363,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. + If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -376,20 +376,20 @@ dds The first byte of a logical record is position 0. - Provide *key_offset* only for VSAM key-sequenced data sets. + Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g ``80``). + The logical record length. (e.g \ :literal:`80`\ ). For variable data sets, the length must include the 4-byte prefix area. 
Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0. - Valid values are (1-32760 for non-vsam, 1-32761 for vsam). + Valid values are (1-32760 for non-VSAM, 1-32761 for VSAM). Maps to LRECL on z/OS. @@ -417,11 +417,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -463,7 +463,7 @@ dds path The path to an existing UNIX file. - Or provide the path to an new created UNIX file when *status_group=OCREAT*. + Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=OCREAT`\ . The provided path must be absolute. @@ -488,7 +488,7 @@ dds mode - The file access attributes when the UNIX file is created specified in *path*. + The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ . Specify the mode as an octal number similarly to chmod. @@ -499,47 +499,47 @@ dds status_group - The status for the UNIX file specified in *path*. + The status for the UNIX file specified in \ :emphasis:`path`\ . - If you do not specify a value for the *status_group* parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. + If you do not specify a value for the \ :emphasis:`status\_group`\ parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. Maps to PATHOPTS status group file options on z/OS. You can specify up to 6 choices. 
- *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file. + \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file. - *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. + \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step. - *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. + \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. - *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. 
- *ononblock* specifies the following, depending on the type of file + \ :emphasis:`ononblock`\ specifies the following, depending on the type of file For a FIFO special file - 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. + 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. - 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. + 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. + 7. 
If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. - *ononblock* has no effect on other file types. + \ :emphasis:`ononblock`\ has no effect on other file types. - *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. + \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . - When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. + When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -548,7 +548,7 @@ dds access_group - The kind of access to request for the UNIX file specified in *path*. + The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . | **required**: False | **type**: str @@ -556,7 +556,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in *path*. + The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . Maps to FILEDATA on z/OS. @@ -569,7 +569,7 @@ dds block_size The block size, in bytes, for the UNIX file. 
- Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -578,7 +578,7 @@ dds record_length The logical record length for the UNIX file. - *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -589,7 +589,7 @@ dds record_format The record format for the UNIX file. - *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -608,11 +608,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. 
| **required**: True | **type**: str @@ -638,7 +638,7 @@ dds dd_input - *dd_input* is used to specify an in-stream data set. + \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -656,15 +656,15 @@ dds content The input contents for the DD. - *dd_input* supports single or multiple lines of input. + \ :emphasis:`dd\_input`\ supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. 
- When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. | **required**: True | **type**: raw @@ -682,11 +682,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -696,7 +696,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for *dd_input*, *src_encoding* should generally not need to be changed. + for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. | **required**: False | **type**: str @@ -714,7 +714,7 @@ dds dd_output - Use *dd_output* to specify - Content sent to the DD should be returned to the user. + Use \ :emphasis:`dd\_output`\ to specify - Content sent to the DD should be returned to the user. | **required**: False | **type**: dict @@ -739,11 +739,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. 
+ \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -753,7 +753,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for *dd_input*, *src_encoding* should generally not need to be changed. + for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. | **required**: False | **type**: str @@ -771,9 +771,9 @@ dds dd_dummy - Use *dd_dummy* to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. + Use \ :emphasis:`dd\_dummy`\ to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. - *dd_dummy* accepts no content input. + \ :emphasis:`dd\_dummy`\ accepts no content input. | **required**: False | **type**: dict @@ -788,7 +788,7 @@ dds dd_vio - *dd_vio* is used to handle temporary data sets. + \ :emphasis:`dd\_vio`\ is used to handle temporary data sets. VIO data sets reside in the paging space; but, to the problem program and the access method, the data sets appear to reside on a direct access storage device. @@ -807,7 +807,7 @@ dds dd_concat - *dd_concat* is used to specify a data set concatenation. + \ :emphasis:`dd\_concat`\ is used to specify a data set concatenation. | **required**: False | **type**: dict @@ -821,7 +821,7 @@ dds dds - A list of DD statements, which can contain any of the following types: *dd_data_set*, *dd_unix*, and *dd_input*. 
+ A list of DD statements, which can contain any of the following types: \ :emphasis:`dd\_data\_set`\ , \ :emphasis:`dd\_unix`\ , and \ :emphasis:`dd\_input`\ . | **required**: False | **type**: list @@ -831,7 +831,7 @@ dds dd_data_set Specify a data set. - *dd_data_set* can reference an existing data set. The data set referenced with ``data_set_name`` must be allocated before the module `zos_mvs_raw <./zos_mvs_raw.html>`_ is run, you can use `zos_data_set <./zos_data_set.html>`_ to allocate a data set. + \ :emphasis:`dd\_data\_set`\ can reference an existing data set. The data set referenced with \ :literal:`data\_set\_name`\ must be allocated before the module \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ is run, you can use \ `zos\_data\_set <./zos_data_set.html>`__\ to allocate a data set. | **required**: False | **type**: dict @@ -845,7 +845,7 @@ dds type - The data set type. Only required when *disposition=new*. + The data set type. Only required when \ :emphasis:`disposition=new`\ . Maps to DSNTYPE on z/OS. @@ -855,7 +855,7 @@ dds disposition - *disposition* indicates the status of a data set. + \ :emphasis:`disposition`\ indicates the status of a data set. Defaults to shr. @@ -865,31 +865,31 @@ dds disposition_normal - *disposition_normal* indicates what to do with the data set after normal termination of the program. + \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after normal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog disposition_abnormal - *disposition_abnormal* indicates what to do with the data set after abnormal termination of the program. + \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after abnormal termination of the program. 
| **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog reuse - Determines if data set should be reused if *disposition=NEW* and a data set with matching name already exists. + Determines if data set should be reused if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. - If *reuse=true*, *disposition* will be automatically switched to ``SHR``. + If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . - If *reuse=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *replace*. + Mutually exclusive with \ :emphasis:`replace`\ . - *reuse* is only considered when *disposition=NEW* + \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ | **required**: False | **type**: bool @@ -897,17 +897,17 @@ dds replace - Determines if data set should be replaced if *disposition=NEW* and a data set with matching name already exists. + Determines if data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. - If *replace=true*, the original data set will be deleted, and a new data set created. + If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. - If *replace=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *reuse*. + Mutually exclusive with \ :emphasis:`reuse`\ . 
- *replace* is only considered when *disposition=NEW* + \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ - *replace* will result in loss of all data in the original data set unless *backup* is specified. + \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. | **required**: False | **type**: bool @@ -915,9 +915,9 @@ dds backup - Determines if a backup should be made of existing data set when *disposition=NEW*, *replace=true*, and a data set with the desired name is found. + Determines if a backup should be made of existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. - *backup* is only used when *replace=true*. + \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . | **required**: False | **type**: bool @@ -925,7 +925,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. + The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . | **required**: False | **type**: str @@ -935,9 +935,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -946,9 +946,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to *space_type* is used as the unit of space for the allocation. 
+ The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -966,7 +966,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - *sms_management_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -977,7 +977,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - *sms_storage_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -988,7 +988,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - *sms_data_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -999,7 +999,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -1015,9 +1015,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - *key_label* is the public name of a protected encryption key in the ICSF key repository. + \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. - *key_label* should only be provided when creating an extended format data set. + \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. 
@@ -1039,7 +1039,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -1048,9 +1048,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD1 on z/OS. @@ -1074,7 +1074,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. @@ -1083,9 +1083,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD2 on z/OS. @@ -1098,7 +1098,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. + If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -1111,14 +1111,14 @@ dds The first byte of a logical record is position 0. - Provide *key_offset* only for VSAM key-sequenced data sets. + Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. 
| **required**: False | **type**: int record_length - The logical record length. (e.g ``80``). + The logical record length. (e.g \ :literal:`80`\ ). For variable data sets, the length must include the 4-byte prefix area. @@ -1152,11 +1152,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -1191,7 +1191,7 @@ dds path The path to an existing UNIX file. - Or provide the path to an new created UNIX file when *status_group=OCREAT*. + Or provide the path to a newly created UNIX file when \ :emphasis:`status\_group=ocreat`\ . The provided path must be absolute. @@ -1216,7 +1216,7 @@ dds mode - The file access attributes when the UNIX file is created specified in *path*. + The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ . Specify the mode as an octal number similar to chmod. @@ -1227,47 +1227,47 @@ dds status_group - The status for the UNIX file specified in *path*. + The status for the UNIX file specified in \ :emphasis:`path`\ . - If you do not specify a value for the *status_group* parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. + If you do not specify a value for the \ :emphasis:`status\_group`\ parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. Maps to PATHOPTS status group file options on z/OS. You can specify up to 6 choices. 
- *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file. + \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file. - *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. + \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step. - *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. + \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. - *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. 
- *ononblock* specifies the following, depending on the type of file + \ :emphasis:`ononblock`\ specifies the following, depending on the type of file For a FIFO special file - 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. + 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. - 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. + 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. + 7. 
If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. - *ononblock* has no effect on other file types. + \ :emphasis:`ononblock`\ has no effect on other file types. - *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. + \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . - When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. + When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -1276,7 +1276,7 @@ dds access_group - The kind of access to request for the UNIX file specified in *path*. + The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . | **required**: False | **type**: str @@ -1284,7 +1284,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in *path*. + The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . Maps to FILEDATA on z/OS. @@ -1297,7 +1297,7 @@ dds block_size The block size, in bytes, for the UNIX file. 
- Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -1306,7 +1306,7 @@ dds record_length The logical record length for the UNIX file. - *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -1317,7 +1317,7 @@ dds record_format The record format for the UNIX file. - *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -1336,11 +1336,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. 
| **required**: True | **type**: str @@ -1366,7 +1366,7 @@ dds dd_input - *dd_input* is used to specify an in-stream data set. + \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -1377,15 +1377,15 @@ dds content The input contents for the DD. - *dd_input* supports single or multiple lines of input. + \ :emphasis:`dd\_input`\ supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. 
- When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. | **required**: True | **type**: raw @@ -1403,11 +1403,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -1417,7 +1417,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for *dd_input*, *src_encoding* should generally not need to be changed. + for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. | **required**: False | **type**: str @@ -1440,7 +1440,7 @@ dds tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. 
| **required**: False | **type**: str @@ -1756,11 +1756,11 @@ Notes ----- .. note:: - When executing programs using `zos_mvs_raw <./zos_mvs_raw.html>`_, you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. + When executing programs using \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ , you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. - 1. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. + 1. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. - 2. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + 2. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. 3. When executing a program, refer to the program's documentation as each program's requirements can vary from DDs, instream-data indentation and continuation characters. @@ -1838,7 +1838,7 @@ backups | **type**: str backup_name - The name of the data set containing the backup of content from data set in original_name. + The name of the data set containing the backup of content from data set in original\_name. 
| **type**: str diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 9ad26d64c..ff1e5fe87 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -52,7 +52,7 @@ wait_time_s This option is helpful on a busy system requiring more time to execute commands. - Setting *wait* can instruct if execution should wait the full *wait_time_s*. + Setting \ :emphasis:`wait`\ can instruct if execution should wait the full \ :emphasis:`wait\_time\_s`\ . | **required**: False | **type**: int diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index b2e99d399..a03a17fdc 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -31,7 +31,7 @@ system If the system name is not specified, all outstanding messages for that system and for the local systems attached to it are returned. - A trailing asterisk, (*) wildcard is supported. + A trailing asterisk, (\*) wildcard is supported. | **required**: False | **type**: str @@ -42,7 +42,7 @@ message_id If the message identifier is not specified, all outstanding messages for all message identifiers are returned. - A trailing asterisk, (*) wildcard is supported. + A trailing asterisk, (\*) wildcard is supported. | **required**: False | **type**: str @@ -53,7 +53,7 @@ job_name If the message job name is not specified, all outstanding messages for all job names are returned. - A trailing asterisk, (*) wildcard is supported. + A trailing asterisk, (\*) wildcard is supported. | **required**: False | **type**: str @@ -69,24 +69,24 @@ message_filter filter - Specifies the substring or regex to match to the outstanding messages, see *use_regex*. + Specifies the substring or regex to match to the outstanding messages, see \ :emphasis:`use\_regex`\ . All special characters in a filter string that are not a regex are escaped. 
- Valid Python regular expressions are supported. See `the official documentation <https://docs.python.org/library/re.html>`_ for more information. + Valid Python regular expressions are supported. See \ `the official documentation <https://docs.python.org/library/re.html>`__\ for more information. - Regular expressions are compiled with the flag **re.DOTALL** which makes the **'.'** special character match any character including a newline." + Regular expressions are compiled with the flag \ :strong:`re.DOTALL`\ which makes the \ :strong:`'.'`\ special character match any character including a newline." | **required**: True | **type**: str use_regex - Indicates that the value for *filter* is a regex or a string to match. + Indicates that the value for \ :emphasis:`filter`\ is a regex or a string to match. - If False, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. + If False, the module assumes that \ :emphasis:`filter`\ is not a regex and matches the \ :emphasis:`filter`\ substring on the outstanding messages. - If True, the module creates a regex from the *filter* string and matches it to the outstanding messages. + If True, the module creates a regex from the \ :emphasis:`filter`\ string and matches it to the outstanding messages. | **required**: False | **type**: bool @@ -222,7 +222,7 @@ actions | **sample**: STC01537 message_text - Content of the outstanding message requiring operator action awaiting a reply. If *message_filter* is set, *message_text* will be filtered accordingly. + Content of the outstanding message requiring operator action awaiting a reply. If \ :emphasis:`message\_filter`\ is set, \ :emphasis:`message\_text`\ will be filtered accordingly. 
| **returned**: success | **type**: str diff --git a/docs/source/modules/zos_ping.rst b/docs/source/modules/zos_ping.rst index a4405b473..acb901790 100644 --- a/docs/source/modules/zos_ping.rst +++ b/docs/source/modules/zos_ping.rst @@ -16,9 +16,9 @@ zos_ping -- Ping z/OS and check dependencies. Synopsis -------- -- `zos_ping <./zos_ping.html>`_ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. -- `zos_ping <./zos_ping.html>`_ returns ``pong`` when the target host is not missing any required dependencies. -- If the target host is missing optional dependencies, the `zos_ping <./zos_ping.html>`_ will return one or more warning messages. +- \ `zos\_ping <./zos_ping.html>`__\ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. +- \ `zos\_ping <./zos_ping.html>`__\ returns \ :literal:`pong`\ when the target host is not missing any required dependencies. +- If the target host is missing optional dependencies, the \ `zos\_ping <./zos_ping.html>`__\ will return one or more warning messages. - If a required dependency is missing from the target host, an explanatory message will be returned with the module failure. @@ -44,7 +44,7 @@ Notes ----- .. note:: - This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry ``scp_extra_args="-O"`` into the ini file named ``ansible.cfg``. + This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. 
Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry \ :literal:`scp\_extra\_args="-O"`\ into the ini file named \ :literal:`ansible.cfg`\ . diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 31b237588..6f36e05e2 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -16,7 +16,7 @@ zos_script -- Run scripts in z/OS Synopsis -------- -- The `zos_script <./zos_script.html>`_ module runs a local or remote script in the remote machine. +- The \ `zos\_script <./zos_script.html>`__\ module runs a local or remote script in the remote machine. @@ -56,7 +56,7 @@ creates encoding Specifies which encodings the script should be converted from and to. - If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. + If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. | **required**: False | **type**: dict @@ -87,9 +87,9 @@ executable remote_src - If set to ``false``, the module will search the script in the controller. + If set to \ :literal:`false`\ , the module will search the script in the controller. - If set to ``true``, the module will search the script in the remote machine. + If set to \ :literal:`true`\ , the module will search the script in the remote machine. | **required**: False | **type**: bool @@ -103,13 +103,13 @@ removes use_template - Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. 
- Only valid when ``src`` is a local file or directory. + Only valid when \ :literal:`src`\ is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ | **required**: False | **type**: bool @@ -119,9 +119,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . - These options are ignored unless ``use_template`` is true. 
+ These options are ignored unless \ :literal:`use\_template`\ is true. | **required**: False | **type**: dict @@ -200,7 +200,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -284,7 +284,7 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. - The location in the z/OS system where local scripts will be copied to can be configured through Ansible's ``remote_tmp`` option. Refer to `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`_ for more information. + The location in the z/OS system where local scripts will be copied to can be configured through Ansible's \ :literal:`remote\_tmp`\ option. Refer to \ `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`__\ for more information. All local scripts copied to a remote z/OS system will be removed from the managed node before the module finishes executing. @@ -292,13 +292,13 @@ Notes The module will only add execution permissions for the file owner. - If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. + If executing REXX scripts, make sure to include a newline character on each line of the file. 
Otherwise, the interpreter may fail and return error \ :literal:`BPXW0003I`\ . - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with \ `zos\_tso\_command <./zos_tso_command.html>`__\ . The community script module does not rely on Python to execute scripts on a managed node, while this module does. Python must be present on the remote machine. 
diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index 4af6b1b52..b35c13a1b 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -40,7 +40,7 @@ commands max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. | **required**: False | **type**: int @@ -119,7 +119,7 @@ output max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. | **returned**: always | **type**: int diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index 91fa597ee..a53747d6c 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -16,8 +16,8 @@ zos_unarchive -- Unarchive files and data sets in z/OS. Synopsis -------- -- The ``zos_unarchive`` module unpacks an archive after optionally transferring it to the remote system. -- For supported archive formats, see option ``format``. +- The \ :literal:`zos\_unarchive`\ module unpacks an archive after optionally transferring it to the remote system. +- For supported archive formats, see option \ :literal:`format`\ . - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. @@ -33,11 +33,11 @@ Parameters src The remote absolute path or data set of the archive to be uncompressed. - *src* can be a USS file or MVS data set name. + \ :emphasis:`src`\ can be a USS file or MVS data set name. USS file paths should be absolute paths. 
- MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. + MVS data sets supported types are \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . | **required**: True | **type**: str @@ -72,14 +72,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the *xmit_log_data_set* name, ensure there is adequate space. + When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. + If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using \ :literal:`xmit`\ or \ :literal:`terse`\ . | **required**: False | **type**: bool @@ -87,7 +87,7 @@ format dest_volumes - When *use_adrdssu=True*, specify the volume the data sets will be written to. + When \ :emphasis:`use\_adrdssu=True`\ , specify the volume the data sets will be written to. If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. @@ -103,7 +103,7 @@ format dest The remote absolute path or data set where the content should be unarchived to. - *dest* can be a USS file, directory or MVS data set name. + \ :emphasis:`dest`\ can be a USS file, directory or MVS data set name. If dest has missing parent directories, they will not be created. @@ -116,7 +116,7 @@ group When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if ``dest`` is USS, otherwise ignored. 
+ This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str @@ -125,13 +125,13 @@ group mode The permission of the uncompressed files. - If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. + The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. - *mode=preserve* means that the file will be given the same permissions as the source file. + \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. | **required**: False | **type**: str @@ -149,7 +149,7 @@ owner include A list of directories, files or data set names to extract from the archive. - When ``include`` is set, only those files will we be extracted leaving the remaining files in the archive. 
+ When \ :literal:`include`\ is set, only those files will be extracted leaving the remaining files in the archive. Mutually exclusive with exclude. @@ -177,7 +177,7 @@ list dest_data_set - Data set attributes to customize a ``dest`` data set that the archive will be copied into. + Data set attributes to customize a \ :literal:`dest`\ data set that the archive will be copied into. | **required**: False | **type**: dict @@ -195,23 +195,23 @@ dest_data_set | **required**: False | **type**: str - | **default**: SEQ - | **choices**: SEQ, PDS, PDSE + | **default**: seq + | **choices**: seq, pds, pdse space_primary - If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int space_secondary - If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -220,21 +220,21 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk record_format - If the destination data set does not exist, this sets the format of the data set. 
(e.g ``FB``) + If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str - | **choices**: FB, VB, FBA, VBA, U + | **choices**: fb, vb, fba, vba, u record_length @@ -265,9 +265,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -276,9 +276,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -327,7 +327,7 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -342,9 +342,9 @@ force remote_src - If set to true, ``zos_unarchive`` retrieves the archive from the remote system. + If set to true, \ :literal:`zos\_unarchive`\ retrieves the archive from the remote system. - If set to false, ``zos_unarchive`` searches the local machine (Ansible controller) for the archive. + If set to false, \ :literal:`zos\_unarchive`\ searches the local machine (Ansible controller) for the archive. 
| **required**: False | **type**: bool @@ -404,7 +404,7 @@ Notes .. note:: VSAMs are not supported. - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst index 195435924..25a0897b9 100644 --- a/docs/source/modules/zos_volume_init.rst +++ b/docs/source/modules/zos_volume_init.rst @@ -17,14 +17,14 @@ zos_volume_init -- Initialize volumes or minidisks. Synopsis -------- - Initialize a volume or minidisk on z/OS. -- *zos_volume_init* will create the volume label and entry into the volume table of contents (VTOC). +- \ :emphasis:`zos\_volume\_init`\ will create the volume label and entry into the volume table of contents (VTOC). - Volumes are used for storing data and executable programs. - A minidisk is a portion of a disk that is linked to your virtual machine. - A VTOC lists the data sets that reside on a volume, their location, size, and other attributes. -- *zos_volume_init* uses the ICKDSF command INIT to initialize a volume. 
In some cases the command could be protected by facility class `STGADMIN.ICK.INIT`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. -- ICKDSF is an Authorized Program Facility (APF) program on z/OS, *zos_volume_init* will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. +- \ :emphasis:`zos\_volume\_init`\ uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class \`STGADMIN.ICK.INIT\`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. +- ICKDSF is an Authorized Program Facility (APF) program on z/OS, \ :emphasis:`zos\_volume\_init`\ will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. - Note that defaults set on target z/OS systems may override ICKDSF parameters. -- If is recommended that data on the volume is backed up as the *zos_volume_init* module will not perform any backups. You can use the `zos_backup_restore <./zos_backup_restore.html>`_ module to backup a volume. +- It is recommended that data on the volume is backed up as the \ :emphasis:`zos\_volume\_init`\ module will not perform any backups. You can use the \ `zos\_backup\_restore <./zos_backup_restore.html>`__\ module to backup a volume. @@ -35,9 +35,9 @@ Parameters address - *address* is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. + \ :emphasis:`address`\ is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. - *address* can be the number assigned to the device (device number) when it is installed or the virtual address. 
+ \ :emphasis:`address`\ can be the number assigned to the device (device number) when it is installed or the virtual address. | **required**: True | **type**: str @@ -46,15 +46,15 @@ address verify_volid Verify that the volume serial matches what is on the existing volume or minidisk. - *verify_volid* must be 1 to 6 alphanumeric characters or ``*NONE*``. + \ :emphasis:`verify\_volid`\ must be 1 to 6 alphanumeric characters or \ :literal:`\*NONE\*`\ . - To verify that a volume serial number does not exist, use *verify_volid=*NONE**. + To verify that a volume serial number does not exist, use \ :emphasis:`verify\_volid=\*NONE\*`\ . - If *verify_volid* is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. + If \ :emphasis:`verify\_volid`\ is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. - If *verify_volid=*NONE** is specified and a volume serial is found on the volume or minidisk, initialization does not complete. + If \ :emphasis:`verify\_volid=\*NONE\*`\ is specified and a volume serial is found on the volume or minidisk, initialization does not complete. - Note, this option is **not** a boolean, leave it blank to skip the verification. + Note, this option is \ :strong:`not`\ a boolean, leave it blank to skip the verification. | **required**: False | **type**: str @@ -73,11 +73,11 @@ volid Expects 1-6 alphanumeric, national ($,#,@) or special characters. - A *volid* with less than 6 characters will be padded with spaces. + A \ :emphasis:`volid`\ with less than 6 characters will be padded with spaces. - A *volid* can also be referred to as volser or volume serial number. + A \ :emphasis:`volid`\ can also be referred to as volser or volume serial number. - When *volid* is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. 
+ When \ :emphasis:`volid`\ is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. | **required**: False | **type**: str @@ -99,7 +99,7 @@ index The VTOC index enhances the performance of VTOC access. - When set to *false*, no index will be created. + When set to \ :emphasis:`false`\ , no index will be created. | **required**: False | **type**: bool @@ -109,7 +109,7 @@ index sms_managed Specifies that the volume be managed by Storage Management System (SMS). - If *sms_managed* is *true* then *index* must also be *true*. + If \ :emphasis:`sms\_managed`\ is \ :emphasis:`true`\ then \ :emphasis:`index`\ must also be \ :emphasis:`true`\ . | **required**: False | **type**: bool @@ -127,7 +127,7 @@ verify_volume_empty tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index e9c238b87..e3ea36dc8 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -403,7 +403,7 @@ def _remote_cleanup(self, dest, dest_exists, task_vars): else: module_args = dict(name=dest, state="absent") if is_member(dest): - module_args["type"] = "MEMBER" + module_args["type"] = "member" self._execute_module( module_name="ibm.ibm_zos_core.zos_data_set", module_args=module_args, @@ -466,6 +466,16 @@ def _update_result(is_binary, copy_res, original_args, original_src): updated_result["dest_created"] = True updated_result["destination_attributes"] = dest_data_set_attrs + # Setting attributes to lower case to conform to docs. 
+ # Part of the change to lowercase choices in the collection involves having + # a consistent interface that also returns the same values in lowercase. + if "record_format" in updated_result["destination_attributes"]: + updated_result["destination_attributes"]["record_format"] = updated_result["destination_attributes"]["record_format"].lower() + if "space_type" in updated_result["destination_attributes"]: + updated_result["destination_attributes"]["space_type"] = updated_result["destination_attributes"]["space_type"].lower() + if "type" in updated_result["destination_attributes"]: + updated_result["destination_attributes"]["type"] = updated_result["destination_attributes"]["type"].lower() + return updated_result diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 6bbd0f9d9..8e06c340b 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -44,15 +44,15 @@ def run(self, tmp=None, task_vars=None): use_template = _process_boolean(module_args.get("use_template")) location = module_args.get("location") - if use_template and location != "LOCAL": + if use_template and location != "local": result.update(dict( failed=True, changed=False, - msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'LOCAL'".format(location) + msg="Use of Jinja2 templates is only valid for local files. 
Location is set to '{0}' but should be 'local'".format(location) )) return result - if location == "LOCAL": + if location == "local": source = self._task.args.get("src", None) diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 6e679d62d..ed508bcf0 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -87,11 +87,11 @@ def run(self, tmp=None, task_vars=None): ) dest = cmd_res.get("stdout") if dest_data_set.get("space_primary") is None: - dest_data_set.update(space_primary=5, space_type="M") + dest_data_set.update(space_primary=5, space_type="m") if format_name == 'terse': - dest_data_set.update(type='SEQ', record_format='FB', record_length=1024) + dest_data_set.update(type='seq', record_format='fb', record_length=1024) if format_name == 'xmit': - dest_data_set.update(type='SEQ', record_format='FB', record_length=80) + dest_data_set.update(type='seq', record_format='fb', record_length=80) copy_module_args.update( dict( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 3bd502858..40c1a4047 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -919,7 +919,7 @@ def _build_zoau_args(**kwargs): secondary += space_type type = kwargs.get("type") - if type and type == "ZFS": + if type and type.upper() == "ZFS": type = "LDS" volumes = ",".join(volumes) if volumes else None diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 951b6bc87..cbe96b65d 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -81,8 +81,8 @@ type: str required: false choices: - - PACK - - SPACK + - pack + - spack xmit_log_data_set: description: - Provide the name of a data set to store xmit log output. 
@@ -193,9 +193,9 @@ - Organization of the destination type: str required: false - default: SEQ + default: seq choices: - - SEQ + - seq space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -214,28 +214,28 @@ description: - If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false record_format: description: - If the destination data set does not exist, this sets the format of the data set. (e.g C(FB)) - - Choices are case-insensitive. + - Choices are case-sensitive. required: false choices: - - FB - - VB - - FBA - - VBA - - U + - fb + - vb + - fba + - vba + - u type: str record_length: description: @@ -356,7 +356,7 @@ format: name: terse format_options: - terse_pack: "SPACK" + terse_pack: "spack" use_adrdssu: True # Use a pattern to store @@ -795,17 +795,17 @@ def _create_dest_data_set( arguments.update(name=temp_ds) if record_format is None: - arguments.update(record_format="FB") + arguments.update(record_format="fb") if record_length is None: arguments.update(record_length=80) if type is None: - arguments.update(type="SEQ") + arguments.update(type="seq") if space_primary is None: arguments.update(space_primary=5) if space_secondary is None: arguments.update(space_secondary=3) if space_type is None: - arguments.update(space_type="M") + arguments.update(space_type="m") arguments.pop("self") changed = data_set.DataSet.ensure_present(**arguments) return arguments["name"], changed @@ -819,8 +819,8 @@ def create_dest_ds(self, name): name {str} - name of the newly created data set. 
""" record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) - # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) + # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) @@ -952,15 +952,19 @@ def compute_dest_size(self): dest_space += int(ds.total_space) # space unit returned from listings is bytes dest_space = math.ceil(dest_space / 1024) - self.dest_data_set.update(space_primary=dest_space, space_type="K") + self.dest_data_set.update(space_primary=dest_space, space_type="k") class AMATerseArchive(MVSArchive): def __init__(self, module): super(AMATerseArchive, self).__init__(module) self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") + # We store pack_ard in uppercase because the AMATerse command requires + # it in uppercase. 
if self.pack_arg is None: self.pack_arg = "SPACK" + else: + self.pack_arg = self.pack_arg.upper() def add(self, src, archive): """ @@ -987,8 +991,8 @@ def archive_targets(self): """ if self.use_adrdssu: source, changed = self._create_dest_data_set( - type="SEQ", - record_format="U", + type="seq", + record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True, @@ -1006,8 +1010,8 @@ def archive_targets(self): dest, changed = self._create_dest_data_set( name=self.dest, replace=True, - type='SEQ', - record_format='FB', + type='seq', + record_format='fb', record_length=AMATERSE_RECORD_LENGTH, space_primary=self.dest_data_set.get("space_primary"), space_type=self.dest_data_set.get("space_type")) @@ -1056,8 +1060,8 @@ def archive_targets(self): """ if self.use_adrdssu: source, changed = self._create_dest_data_set( - type="SEQ", - record_format="U", + type="seq", + record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True, @@ -1075,8 +1079,8 @@ def archive_targets(self): dest, changed = self._create_dest_data_set( name=self.dest, replace=True, - type='SEQ', - record_format='FB', + type='seq', + record_format='fb', record_length=XMIT_RECORD_LENGTH, space_primary=self.dest_data_set.get("space_primary"), space_type=self.dest_data_set.get("space_type")) @@ -1137,7 +1141,7 @@ def run_module(): options=dict( terse_pack=dict( type='str', - choices=['PACK', 'SPACK'], + choices=['pack', 'spack'], ), xmit_log_data_set=dict( type='str', @@ -1163,9 +1167,9 @@ def run_module(): ), type=dict( type='str', - choices=['SEQ'], + choices=['seq'], required=False, - default="SEQ", + default="seq", ), space_primary=dict( type='int', required=False), @@ -1173,12 +1177,12 @@ def run_module(): type='int', required=False), space_type=dict( type='str', - choices=['K', 'M', 'G', 'CYL', 'TRK'], + choices=['k', 'm', 'g', 'cyl', 'trk'], required=False, ), record_format=dict( type='str', - choices=["FB", "VB", "FBA", "VBA", "U"], + choices=["fb", "vb", "fba", "vba", "u"], 
required=False ), record_length=dict(type='int', required=False), @@ -1214,7 +1218,7 @@ def run_module(): terse_pack=dict( type='str', required=False, - choices=['PACK', 'SPACK'], + choices=['pack', 'spack'], ), xmit_log_data_set=dict( type='str', @@ -1226,7 +1230,7 @@ def run_module(): ) ), default=dict( - terse_pack="SPACK", + terse_pack="spack", xmit_log_data_set="", use_adrdssu=False), ), @@ -1234,7 +1238,7 @@ def run_module(): default=dict( name="", format_options=dict( - terse_pack="SPACK", + terse_pack="spack", xmit_log_data_set="", use_adrdssu=False ) @@ -1249,7 +1253,7 @@ def run_module(): required=False, options=dict( name=dict(arg_type='str', required=False), - type=dict(arg_type='str', required=False, default="SEQ"), + type=dict(arg_type='str', required=False, default="seq"), space_primary=dict(arg_type='int', required=False), space_secondary=dict( arg_type='int', required=False), diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 3185652e1..a112da247 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -168,15 +168,15 @@ space_type: description: - The unit of measurement to use when defining data set space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). - - When I(full_volume=True), I(space_type) defaults to C(G), otherwise default is C(M) + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). 
+ - When I(full_volume=True), I(space_type) defaults to C(g), otherwise default is C(m) type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false aliases: - unit @@ -233,7 +233,7 @@ include: user.** backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup all datasets matching the pattern USER.** that are present on the volume MYVOL1 to data set MY.BACKUP.DZP, @@ -245,7 +245,7 @@ volume: MYVOL1 backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup an entire volume, MYVOL1, to the UNIX file /tmp/temp_backup.dzp, allocate 1GB for data sets used in backup process. @@ -255,7 +255,7 @@ volume: MYVOL1 full_volume: yes space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Use z/OS username as new HLQ. @@ -299,7 +299,7 @@ full_volume: yes backup_name: MY.BACKUP.DZP space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Specify DB2SMS10 for the SMS storage and management classes to use for the restored @@ -346,7 +346,7 @@ def main(): ), ), space=dict(type="int", required=False, aliases=["size"]), - space_type=dict(type="str", required=False, aliases=["unit"], choices=["K", "M", "G", "CYL", "TRK"]), + space_type=dict(type="str", required=False, aliases=["unit"], choices=["k", "m", "g", "cyl", "trk"]), volume=dict(type="str", required=False), full_volume=dict(type="bool", default=False), temp_volume=dict(type="str", required=False, aliases=["dest_volume"]), @@ -709,12 +709,12 @@ def space_type_type(contents, dependencies): """ if contents is None: if dependencies.get("full_volume"): - return "G" + return "g" else: - return "M" - if not match(r"^(M|G|K|TRK|CYL)$", contents, IGNORECASE): + return "m" + if not match(r"^(m|g|k|trk|cyl)$", contents, IGNORECASE): raise ValueError( - 'Value {0} is invalid for space_type argument. 
Valid space types are "K", "M", "G", "TRK" or "CYL".'.format( + 'Value {0} is invalid for space_type argument. Valid space types are "k", "m", "g", "trk" or "cyl".'.format( contents ) ) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9acb3c1c6..da29f688a 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -347,16 +347,16 @@ type: str required: true choices: - - KSDS - - ESDS - - RRDS - - LDS - - SEQ - - PDS - - PDSE - - MEMBER - - BASIC - - LIBRARY + - ksds + - esds + - rrds + - lds + - seq + - pds + - pdse + - member + - basic + - library space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -375,27 +375,27 @@ description: - If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false record_format: description: - If the destination data set does not exist, this sets the format of the - data set. (e.g C(FB)) - - Choices are case-insensitive. + data set. (e.g C(fb)) + - Choices are case-sensitive. required: false choices: - - FB - - VB - - FBA - - VBA - - U + - fb + - vb + - fba + - vba + - u type: str record_length: description: @@ -417,15 +417,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). 
- - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). + - I(key_length) should only be provided when I(type=ksds) type: int required: false sms_storage_class: @@ -642,11 +642,11 @@ remote_src: true volume: '222222' dest_data_set: - type: SEQ + type: seq space_primary: 10 space_secondary: 3 - space_type: K - record_format: VB + space_type: k + record_format: vb record_length: 150 - name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL @@ -702,7 +702,7 @@ description: Record format of the dataset. type: str - sample: FB + sample: fb record_length: description: Record length of the dataset. @@ -722,21 +722,21 @@ description: Unit of measurement for space. type: str - sample: K + sample: k type: description: Type of dataset allocated. type: str - sample: PDSE + sample: pdse sample: { "block_size": 32760, - "record_format": "FB", + "record_format": "fb", "record_length": 45, "space_primary": 2, "space_secondary": 1, - "space_type": "K", - "type": "PDSE" + "space_type": "k", + "type": "pdse" } checksum: description: SHA256 checksum of the file after running zos_copy. @@ -2802,7 +2802,7 @@ def run_module(module, arg_def): # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): - dest_ds_type = dest_data_set.get("type") + dest_ds_type = dest_data_set.get("type").upper() elif executable: """ When executable is selected and dest_exists is false means an executable PDSE was copied to remote, so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. 
@@ -2810,16 +2810,7 @@ def run_module(module, arg_def): and LIBRARY is not in MVS_PARTITIONED frozen set.""" dest_ds_type = "PDSE" - if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): - dest_has_asa_chars = True - elif not dest_exists and asa_text: - dest_has_asa_chars = True - elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_attributes = datasets.list_datasets(dest_name)[0] - if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': - dest_has_asa_chars = True - - if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): + if dest_data_set and (dest_data_set.get('record_format', '') == 'fba' or dest_data_set.get('record_format', '') == 'vba'): dest_has_asa_chars = True elif not dest_exists and asa_text: dest_has_asa_chars = True @@ -3177,8 +3168,8 @@ def main(): options=dict( type=dict( type='str', - choices=['BASIC', 'KSDS', 'ESDS', 'RRDS', - 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER', 'LIBRARY'], + choices=['basic', 'ksds', 'esds', 'rrds', + 'lds', 'seq', 'pds', 'pdse', 'member', 'library'], required=True, ), space_primary=dict( @@ -3187,12 +3178,12 @@ def main(): type='int', required=False), space_type=dict( type='str', - choices=['K', 'M', 'G', 'CYL', 'TRK'], + choices=['k', 'm', 'g', 'cyl', 'trk'], required=False, ), record_format=dict( type='str', - choices=["FB", "VB", "FBA", "VBA", "U"], + choices=["fb", "vb", "fba", "vba", "u"], required=False ), record_length=dict(type='int', required=False), diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 1969462c3..446fd6fe7 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -33,7 +33,7 @@ - The name of the data set being managed. 
(e.g C(USER.TEST)) - If I(name) is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - - Required if I(type=MEMBER) or I(state!=present) and not using I(batch). + - Required if I(type=member) or I(state!=present) and not using I(batch). type: str required: false state: @@ -46,7 +46,7 @@ If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). - > - If I(state=absent) and I(type=MEMBER) and I(force=True), the data set + If I(state=absent) and I(type=member) and I(force=True), the data set will be opened with I(DISP=SHR) such that the entire data set can be accessed by other processes while the specified member is deleted. - > @@ -77,7 +77,7 @@ If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). - > - If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + If I(state=present) and I(type=member) and the member does not exist in the data set, create a member formatted to store data, module completes successfully with I(changed=True). Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, @@ -109,26 +109,26 @@ - uncataloged type: description: - - The data set type to be used when creating a data set. (e.g C(pdse)) - - C(MEMBER) expects to be used with an existing partitioned data set. + - The data set type to be used when creating a data set. (e.g C(pdse)). + - C(member) expects to be used with an existing partitioned data set. - Choices are case-sensitive. 
required: false type: str choices: - - KSDS - - ESDS - - RRDS - - LDS - - SEQ - - PDS - - PDSE - - LIBRARY - - BASIC - - LARGE - - MEMBER - - HFS - - ZFS - default: PDS + - ksds + - esds + - rrds + - lds + - seq + - pds + - pdse + - library + - basic + - large + - member + - hfs + - zfs + default: pds space_primary: description: - The amount of primary space to allocate for the dataset. @@ -146,33 +146,33 @@ space_type: description: - The unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false - default: M + default: m record_format: description: - The format of the data set. (e.g C(FB)) - Choices are case-sensitive. - - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS) + - When I(type=ksds), I(type=esds), I(type=rrds), I(type=lds) or I(type=zfs) then I(record_format=None), these types do not have a default I(record_format). required: false choices: - - FB - - VB - - FBA - - VBA - - U - - F + - fb + - vb + - fba + - vba + - u + - f type: str - default: FB + default: fb aliases: - format sms_storage_class: @@ -221,15 +221,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). 
+ - I(key_length) should only be provided when I(type=ksds) type: int required: false volumes: @@ -281,7 +281,7 @@ - The I(force=True) option enables sharing of data sets through the disposition I(DISP=SHR). - The I(force=True) only applies to data set members when I(state=absent) - and I(type=MEMBER). + and I(type=member). type: bool required: false default: false @@ -297,7 +297,7 @@ - The name of the data set being managed. (e.g C(USER.TEST)) - If I(name) is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - - Required if I(type=MEMBER) or I(state!=present) + - Required if I(type=member) or I(state!=present) type: str required: false state: @@ -310,7 +310,7 @@ If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). - > - If I(state=absent) and I(type=MEMBER) and I(force=True), the data + If I(state=absent) and I(type=member) and I(force=True), the data set will be opened with I(DISP=SHR) such that the entire data set can be accessed by other processes while the specified member is deleted. @@ -342,7 +342,7 @@ If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). - > - If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + If I(state=present) and I(type=member) and the member does not exist in the data set, create a member formatted to store data, module completes successfully with I(changed=True). Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, @@ -374,26 +374,26 @@ - uncataloged type: description: - - The data set type to be used when creating a data set. (e.g C(PDSE)) - - C(MEMBER) expects to be used with an existing partitioned data set. 
+ - The data set type to be used when creating a data set. (e.g C(pdse)) + - C(member) expects to be used with an existing partitioned data set. - Choices are case-sensitive. required: false type: str choices: - - KSDS - - ESDS - - RRDS - - LDS - - SEQ - - PDS - - PDSE - - LIBRARY - - BASIC - - LARGE - - MEMBER - - HFS - - ZFS - default: PDS + - ksds + - esds + - rrds + - lds + - seq + - pds + - pdse + - library + - basic + - large + - member + - hfs + - zfs + default: pds space_primary: description: - The amount of primary space to allocate for the dataset. @@ -411,33 +411,33 @@ space_type: description: - The unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false - default: M + default: m record_format: description: - The format of the data set. (e.g C(FB)) - Choices are case-sensitive. - - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or - I(type=ZFS) then I(record_format=None), these types do not have a + - When I(type=ksds), I(type=esds), I(type=rrds), I(type=lds) or + I(type=zfs) then I(record_format=None), these types do not have a default I(record_format). required: false choices: - - FB - - VB - - FBA - - VBA - - U - - F + - fb + - vb + - fba + - vba + - u + - f type: str - default: FB + default: fb aliases: - format sms_storage_class: @@ -486,15 +486,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. 
- - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). + - I(key_length) should only be provided when I(type=ksds) type: int required: false volumes: @@ -539,7 +539,7 @@ - The I(force=True) option enables sharing of data sets through the disposition I(DISP=SHR). - The I(force=True) only applies to data set members when - I(state=absent) and I(type=MEMBER). + I(state=absent) and I(type=member). type: bool required: false default: false @@ -549,7 +549,7 @@ - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: SEQ + type: seq state: present - name: Create a PDS data set if it does not exist @@ -557,27 +557,27 @@ name: someds.name.here type: pds space_primary: 5 - space_type: M - record_format: FBA + space_type: m + record_format: fba record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. 
zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 volumes: "222222" replace: yes @@ -585,19 +585,19 @@ - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: ESDS + type: esds - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: KSDS + type: ksds key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: RRDS + type: rrds sms_storage_class: mydata - name: Delete a data set if it exists @@ -614,43 +614,43 @@ - name: Write a member to an existing PDS; replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member replace: yes - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member - name: Remove a member from an existing PDS zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member - name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member force: yes - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - name: someds.name.here1 - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: FB + space_type: m + record_format: fb replace: yes - name: someds.name.here1(member1) - type: MEMBER + type: member - name: someds.name.here2(member1) - type: MEMBER + type: member replace: yes - name: someds.name.here2(member2) - type: MEMBER + type: member - name: Catalog a data set present on volume 222222 if it is uncataloged. 
zos_data_set: @@ -689,44 +689,44 @@ # CONSTANTS DATA_SET_TYPES = [ - "KSDS", - "ESDS", - "RRDS", - "LDS", - "SEQ", - "PDS", - "PDSE", - "BASIC", - "LARGE", - "LIBRARY", - "MEMBER", - "HFS", - "ZFS", + "ksds", + "esds", + "rrds", + "lds", + "seq", + "pds", + "pdse", + "basic", + "large", + "library", + "member", + "hfs", + "zfs", ] DATA_SET_FORMATS = [ - "FB", - "VB", - "FBA", - "VBA", - "U", - "F", + "fb", + "vb", + "fba", + "vba", + "u", + "f", ] DEFAULT_RECORD_LENGTHS = { - "FB": 80, - "FBA": 80, - "VB": 137, - "VBA": 137, - "U": 0, + "fb": 80, + "fba": 80, + "vb": 137, + "vba": 137, + "u": 0, } DATA_SET_TYPES_VSAM = [ - "KSDS", - "ESDS", - "RRDS", - "LDS", - "ZFS", + "ksds", + "esds", + "rrds", + "lds", + "zfs", ] # ------------- Functions to validate arguments ------------- # @@ -775,14 +775,14 @@ def data_set_name(contents, dependencies): if contents is None: if dependencies.get("state") != "present": raise ValueError('Data set name must be provided when "state!=present"') - if dependencies.get("type") != "MEMBER": + if dependencies.get("type") != "member": tmphlq = dependencies.get("tmp_hlq") if tmphlq is None: tmphlq = "" contents = DataSet.temp_name(tmphlq) else: raise ValueError( - 'Data set and member name must be provided when "type=MEMBER"' + 'Data set and member name must be provided when "type=member"' ) dsname = str(contents) if not re.fullmatch( @@ -796,7 +796,7 @@ def data_set_name(contents, dependencies): dsname, re.IGNORECASE, ) - and dependencies.get("type") == "MEMBER" + and dependencies.get("type") == "member" ): raise ValueError( "Value {0} is invalid for data set argument.".format(dsname) @@ -809,13 +809,13 @@ def space_type(contents, dependencies): """Validates provided data set unit of space is valid. 
Returns the unit of space.""" if dependencies.get("state") == "absent": - return "M" + return "m" if contents is None: return None - match = re.fullmatch(r"(M|G|K|TRK|CYL)", contents, re.IGNORECASE) + match = re.fullmatch(r"(m|g|k|trk|cyl)", contents, re.IGNORECASE) if not match: raise ValueError( - 'Value {0} is invalid for space_type argument. Valid space types are "K", "M", "G", "TRK" or "CYL".'.format( + 'Value {0} is invalid for space_type argument. Valid space types are "k", "m", "g", "trk" or "cyl".'.format( contents ) ) @@ -872,12 +872,11 @@ def record_length(contents, dependencies): # * dependent on state # * dependent on record_length def record_format(contents, dependencies): - """Validates data set format is valid. - Returns uppercase data set format.""" + """Validates data set format is valid.""" if dependencies.get("state") == "absent": - return "FB" + return "fb" if contents is None: - return "FB" + return "fb" formats = "|".join(DATA_SET_FORMATS) if not re.fullmatch(formats, contents, re.IGNORECASE): raise ValueError( @@ -885,17 +884,16 @@ def record_format(contents, dependencies): contents, ", ".join(DATA_SET_FORMATS) ) ) - return contents.upper() + return contents # * dependent on state def data_set_type(contents, dependencies): - """Validates data set type is valid. 
- Returns uppercase data set type.""" - # if dependencies.get("state") == "absent" and contents != "MEMBER": + """Validates data set type is valid.""" + # if dependencies.get("state") == "absent" and contents != "member": # return None if contents is None: - return "PDS" + return "pds" types = "|".join(DATA_SET_TYPES) if not re.fullmatch(types, contents, re.IGNORECASE): raise ValueError( @@ -903,7 +901,7 @@ def data_set_type(contents, dependencies): contents, ", ".join(DATA_SET_TYPES) ) ) - return contents.upper() + return contents # * dependent on state @@ -936,10 +934,10 @@ def key_length(contents, dependencies): Returns data set key length as integer.""" if dependencies.get("state") == "absent": return None - if dependencies.get("type") == "KSDS" and contents is None: + if dependencies.get("type") == "ksds" and contents is None: raise ValueError("key_length is required when requesting KSDS data set.") - if dependencies.get("type") != "KSDS" and contents is not None: - raise ValueError("key_length is only valid when type=KSDS.") + if dependencies.get("type") != "ksds" and contents is not None: + raise ValueError("key_length is only valid when type=ksds.") if contents is None: return None contents = int(contents) @@ -958,10 +956,10 @@ def key_offset(contents, dependencies): Returns data set key offset as integer.""" if dependencies.get("state") == "absent": return None - if dependencies.get("type") == "KSDS" and contents is None: + if dependencies.get("type") == "ksds" and contents is None: raise ValueError("key_offset is required when requesting KSDS data set.") - if dependencies.get("type") != "KSDS" and contents is not None: - raise ValueError("key_offset is only valid when type=KSDS.") + if dependencies.get("type") != "ksds" and contents is not None: + raise ValueError("key_offset is only valid when type=ksds.") if contents is None: return None contents = int(contents) @@ -981,13 +979,13 @@ def perform_data_set_operations(name, state, **extra_args): # passing 
in **extra_args forced me to modify the acceptable parameters # for multiple functions in data_set.py including ensure_present, replace # and create where the force parameter has no bearing. - if state == "present" and extra_args.get("type") != "MEMBER": + if state == "present" and extra_args.get("type") != "member": changed = DataSet.ensure_present(name, **extra_args) - elif state == "present" and extra_args.get("type") == "MEMBER": + elif state == "present" and extra_args.get("type") == "member": changed = DataSet.ensure_member_present(name, extra_args.get("replace")) - elif state == "absent" and extra_args.get("type") != "MEMBER": + elif state == "absent" and extra_args.get("type") != "member": changed = DataSet.ensure_absent(name, extra_args.get("volumes")) - elif state == "absent" and extra_args.get("type") == "MEMBER": + elif state == "absent" and extra_args.get("type") == "member": changed = DataSet.ensure_member_absent(name, extra_args.get("force")) elif state == "cataloged": changed = DataSet.ensure_cataloged(name, extra_args.get("volumes")) @@ -1024,8 +1022,8 @@ def parse_and_validate_args(params): type=space_type, required=False, dependencies=["state"], - choices=["K", "M", "G", "CYL", "TRK"], - default="M", + choices=["k", "m", "g", "cyl", "trk"], + default="m", ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict( @@ -1035,9 +1033,9 @@ def parse_and_validate_args(params): type=record_format, required=False, dependencies=["state"], - choices=["FB", "VB", "FBA", "VBA", "U", "F"], + choices=["fb", "vb", "fba", "vba", "u", "f"], aliases=["format"], - default="FB", + default="fb", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1113,8 +1111,8 @@ def parse_and_validate_args(params): type=space_type, required=False, dependencies=["state"], - choices=["K", "M", "G", "CYL", "TRK"], - default="M", + choices=["k", "m", "g", "cyl", "trk"], + default="m", ), 
space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict(type="int", required=False, dependencies=["state"]), @@ -1122,9 +1120,9 @@ def parse_and_validate_args(params): type=record_format, required=False, dependencies=["state"], - choices=["FB", "VB", "FBA", "VBA", "U", "F"], + choices=["fb", "vb", "fba", "vba", "u", "f"], aliases=["format"], - default="FB", + default="fb", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1224,14 +1222,14 @@ def run_module(): type=dict( type="str", required=False, - default="PDS", + default="pds", choices=DATA_SET_TYPES, ), space_type=dict( type="str", required=False, - default="M", - choices=["K", "M", "G", "CYL", "TRK"], + default="m", + choices=["k", "m", "g", "cyl", "trk"], ), space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), @@ -1239,8 +1237,8 @@ def run_module(): type="str", required=False, aliases=["format"], - default="FB", - choices=["FB", "VB", "FBA", "VBA", "U", "F"], + default="fb", + choices=["fb", "vb", "fba", "vba", "u", "f"], ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported @@ -1289,14 +1287,14 @@ def run_module(): type=dict( type="str", required=False, - default="PDS", + default="pds", choices=DATA_SET_TYPES, ), space_type=dict( type="str", required=False, - default="M", - choices=["K", "M", "G", "CYL", "TRK"], + default="m", + choices=["k", "m", "g", "cyl", "trk"], ), space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), @@ -1304,8 +1302,8 @@ def run_module(): type="str", required=False, aliases=["format"], - choices=["FB", "VB", "FBA", "VBA", "U", "F"], - default="FB" + choices=["fb", "vb", "fba", "vba", "u", "f"], + default="fb" ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to 
document they supported @@ -1357,7 +1355,7 @@ def run_module(): # This section is copied down inside if/check_mode false, so it modifies after the arg parser if module.params.get("batch") is not None: for entry in module.params.get("batch"): - if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: + if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM: entry["record_format"] = None if module.params.get("type") is not None: module.params["type"] = None @@ -1374,7 +1372,7 @@ def run_module(): if module.params.get("record_format") is not None: module.params["record_format"] = None elif module.params.get("type") is not None: - if module.params.get("type").upper() in DATA_SET_TYPES_VSAM: + if module.params.get("type") in DATA_SET_TYPES_VSAM: # For VSAM types set the value to nothing and let the code manage it # module.params["record_format"] = None if module.params.get("record_format") is not None: @@ -1394,7 +1392,7 @@ def run_module(): # This *appears* redundant, bit the parse_and_validate reinforces the default value for record_type if data_set_params.get("batch") is not None: for entry in data_set_params.get("batch"): - if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: + if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM: entry["record_format"] = None if data_set_params.get("type") is not None: data_set_params["type"] = None @@ -1411,7 +1409,7 @@ def run_module(): if data_set_params.get("record_format") is not None: data_set_params["record_format"] = None else: - if data_set_params.get("type").upper() in DATA_SET_TYPES_VSAM: + if data_set_params.get("type") in DATA_SET_TYPES_VSAM: if data_set_params.get("record_format") is not None: data_set_params["record_format"] = None diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 7c66c2543..1b56f459d 100644 --- a/plugins/modules/zos_job_submit.py +++ 
b/plugins/modules/zos_job_submit.py @@ -42,17 +42,17 @@ (e.g "/User/tester/ansible-playbook/sample.jcl") location: required: false - default: DATA_SET + default: data_set type: str choices: - - DATA_SET - - USS - - LOCAL + - data_set + - uss + - local description: - - The JCL location. Supported choices are ``DATA_SET``, ``USS`` or ``LOCAL``. - - DATA_SET can be a PDS, PDSE, or sequential data set. - - USS means the JCL location is located in UNIX System Services (USS). - - LOCAL means locally to the ansible control node. + - The JCL location. Supported choices are C(data_set), C(uss) or C(local). + - C(data_set) can be a PDS, PDSE, or sequential data set. + - C(uss) means the JCL location is located in UNIX System Services (USS). + - C(local) means locally to the ansible control node. wait_time_s: required: false default: 10 @@ -80,17 +80,17 @@ required: false type: str description: - - The volume serial (VOLSER)is where the data set resides. The option + - The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system. - When configured, the L(zos_job_submit,./zos_job_submit.html) will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - - Ignored for I(location=USS) and I(location=LOCAL). + - Ignored for I(location=uss) and I(location=local). encoding: description: - Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - - This option is only supported for when I(location=LOCAL). + - This option is only supported for when I(location=local). - If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system @@ -561,19 +561,19 @@ - name: Submit JCL in a PDSE member. 
zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: USS + location: uss return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: LOCAL + location: local encoding: from: ISO8859-1 to: IBM-037 @@ -581,25 +581,25 @@ - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit JCL and set the max return code the module should fail on to 16. 
zos_job_submit: src: HLQ.DATA.LLQ - location: DATA_SET + location: data_set max_rc: 16 """ @@ -805,8 +805,8 @@ def run_module(): src=dict(type="str", required=True), location=dict( type="str", - default="DATA_SET", - choices=["DATA_SET", "USS", "LOCAL"], + default="data_set", + choices=["data_set", "uss", "local"], ), encoding=dict( type="dict", @@ -875,8 +875,8 @@ def run_module(): src=dict(arg_type="data_set_or_path", required=True), location=dict( arg_type="str", - default="DATA_SET", - choices=["DATA_SET", "USS", "LOCAL"], + default="data_set", + choices=["data_set", "uss", "local"], ), from_encoding=dict( arg_type="encoding", default=Defaults.DEFAULT_ASCII_CHARSET, required=False), @@ -907,7 +907,7 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - temp_file = parsed_args.get("src") if location == "LOCAL" else None + temp_file = parsed_args.get("src") if location == "local" else None # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) @@ -921,13 +921,13 @@ def run_module(): job_submitted_id = None duration = 0 start_time = timer() - if location == "DATA_SET": + if location == "data_set": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=False, volume=volume, start_time=start_time) - elif location == "USS": + elif location == "uss": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) - elif location == "LOCAL": + elif location == "local": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 3f4c642f3..61ca20b9f 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -48,13 +48,13 @@ description: - The type of file system that will be mounted. 
- The physical file systems data set format to perform the logical mount. - - The I(fs_type) is required to be uppercase. + - The I(fs_type) is required to be lowercase. type: str choices: - - HFS - - ZFS - - NFS - - TFS + - hfs + - zfs + - nfs + - tfs required: True state: description: @@ -168,33 +168,33 @@ file hierarchy). type: str choices: - - DRAIN - - FORCE - - IMMEDIATE - - NORMAL - - REMOUNT - - RESET + - drain + - force + - immediate + - normal + - remount + - reset required: False - default: NORMAL + default: normal mount_opts: description: - Options available to the mount. - - If I(mount_opts=RO) on a mounted/remount, mount is performed + - If I(mount_opts=ro) on a mounted/remount, mount is performed read-only. - - If I(mount_opts=SAME) and (unmount_opts=REMOUNT), mount is opened + - If I(mount_opts=same) and (unmount_opts=remount), mount is opened in the same mode as previously opened. - - If I(mount_opts=NOWAIT), mount is performed asynchronously. - - If I(mount_opts=NOSECURITY), security checks are not enforced for + - If I(mount_opts=nowait), mount is performed asynchronously. + - If I(mount_opts=nosecurity), security checks are not enforced for files in this file system. type: str choices: - - RO - - RW - - SAME - - NOWAIT - - NOSECURITY + - ro + - rw + - same + - nowait + - nosecurity required: False - default: RW + default: rw src_params: description: - Specifies a parameter string to be passed to the file system type. @@ -206,15 +206,15 @@ description: - If present, tags get written to any untagged file. - When the file system is unmounted, the tags are lost. - - If I(tag_untagged=NOTEXT) none of the untagged files in the file system are + - If I(tag_untagged=notext) none of the untagged files in the file system are automatically converted during file reading and writing. 
- - If I(tag_untagged=TEXT) each untagged file is implicitly marked as + - If I(tag_untagged=text) each untagged file is implicitly marked as containing pure text data that can be converted. - If this flag is used, use of tag_ccsid is encouraged. type: str choices: - - TEXT - - NOTEXT + - text + - notext required: False tag_ccsid: description: @@ -271,23 +271,23 @@ AUTOMOVE where the file system will be randomly moved to another system (no system list used). - > - I(automove=AUTOMOVE) indicates that ownership of the file system can be + I(automove=automove) indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. - > - I(automove=NOAUTOMOVE) prevents movement of the file system's ownership in some situations. + I(automove=noautomove) prevents movement of the file system's ownership in some situations. - > - I(automove=UNMOUNT) allows the file system to be unmounted in some situations. + I(automove=unmount) allows the file system to be unmounted in some situations. type: str choices: - - AUTOMOVE - - NOAUTOMOVE - - UNMOUNT + - automove + - noautomove + - unmount required: False - default: AUTOMOVE + default: automove automove_list: description: - > - If(automove=AUTOMOVE), this option will be checked. + If(automove=automove), this option will be checked. - > This specifies the list of servers to include or exclude as destinations. - > @@ -317,14 +317,14 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted - name: Unmount a filesystem. 
zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: unmounted unmount_opts: REMOUNT opts: same @@ -333,7 +333,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: RO @@ -341,7 +341,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -351,7 +351,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -363,7 +363,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted allow_uid: no @@ -371,7 +371,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted opts: nowait @@ -379,7 +379,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: NOSECURITY @@ -387,7 +387,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: I,DEV1,DEV2,DEV3,DEV9 @@ -396,7 +396,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7 @@ -854,7 +854,7 @@ def run_module(module, arg_def): src, path, fs_type ) ) - if "RO" in mount_opts: + if "ro" in mount_opts: subcmd = "READ" else: subcmd = "RDWR" @@ -882,14 +882,14 @@ def run_module(module, arg_def): fullcmd = fullcmd + " NOSETUID" parmtext = parmtext + "\n NOSETUID" - if "NOWAIT" in mount_opts: + if "nowait" in mount_opts: fullcmd = fullcmd + " NOWAIT" parmtext = parmtext + "\n NOWAIT" else: fullcmd = fullcmd + " WAIT" parmtext = parmtext + "\n WAIT" - if "NOSECURITY" in mount_opts: + if "nosecurity" in mount_opts: fullcmd = fullcmd + " NOSECURITY" parmtext = parmtext + "\n NOSECURITY" else: @@ 
-1051,10 +1051,10 @@ def main(): fs_type=dict( type="str", choices=[ - "HFS", - "ZFS", - "NFS", - "TFS", + "hfs", + "zfs", + "nfs", + "tfs", ], required=True, ), @@ -1079,27 +1079,27 @@ def main(): ), unmount_opts=dict( type="str", - default="NORMAL", - choices=["DRAIN", "FORCE", "IMMEDIATE", "NORMAL", "REMOUNT", "RESET"], + default="normal", + choices=["drain", "force", "immediate", "normal", "remount", "reset"], required=False, ), mount_opts=dict( type="str", - default="RW", - choices=["RO", "RW", "SAME", "NOWAIT", "NOSECURITY"], + default="rw", + choices=["ro", "rw", "same", "nowait", "nosecurity"], required=False, ), src_params=dict(type="str", required=False), tag_untagged=dict( - type="str", choices=["TEXT", "NOTEXT"], required=False + type="str", choices=["text", "notext"], required=False ), tag_ccsid=dict(type="int", required=False), allow_uid=dict(type="bool", default=True, required=False), sysname=dict(type="str", required=False), automove=dict( type="str", - default="AUTOMOVE", - choices=["AUTOMOVE", "NOAUTOMOVE", "UNMOUNT"], + default="automove", + choices=["automove", "noautomove", "unmount"], required=False, ), automove_list=dict(type="str", required=False), @@ -1114,10 +1114,10 @@ def main(): fs_type=dict( arg_type="str", choices=[ - "HFS", - "ZFS", - "NFS", - "TFS", + "hfs", + "zfs", + "nfs", + "tfs", ], required=True, ), @@ -1139,27 +1139,27 @@ def main(): ), unmount_opts=dict( arg_type="str", - default="NORMAL", - choices=["DRAIN", "FORCE", "IMMEDIATE", "NORMAL", "REMOUNT", "RESET"], + default="normal", + choices=["drain", "force", "immediate", "normal", "remount", "reset"], required=False, ), mount_opts=dict( arg_type="str", - default="RW", - choices=["RO", "RW", "SAME", "NOWAIT", "NOSECURITY"], + default="rw", + choices=["ro", "rw", "same", "nowait", "nosecurity"], required=False, ), src_params=dict(arg_type="str", default="", required=False), tag_untagged=dict( - arg_type="str", choices=["TEXT", "NOTEXT"], required=False + arg_type="str", 
choices=["text", "notext"], required=False ), tag_ccsid=dict(arg_type="int", required=False), allow_uid=dict(arg_type="bool", default=True, required=False), sysname=dict(arg_type="str", default="", required=False), automove=dict( arg_type="str", - default="AUTOMOVE", - choices=["AUTOMOVE", "NOAUTOMOVE", "UNMOUNT"], + default="automove", + choices=["automove", "noautomove", "unmount"], required=False, ), automove_list=dict(arg_type="str", default="", required=False), diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index a440c31c6..bcac50a63 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -96,16 +96,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - LIBRARY - - PDS - - PDSE - - LARGE - - BASIC - - SEQ - - RRDS - - ESDS - - LDS - - KSDS + - library + - pds + - pdse + - large + - basic + - seq + - rrds + - esds + - lds + - ksds disposition: description: - I(disposition) indicates the status of a data set. @@ -125,9 +125,7 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog disposition_abnormal: description: @@ -138,32 +136,30 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog reuse: description: - - Determines if a data set should be reused if I(disposition=NEW) and if a data set with a matching name already exists. + - Determines if a data set should be reused if I(disposition=new) and if a data set with a matching name already exists. - If I(reuse=true), I(disposition) will be automatically switched to C(SHR). - If I(reuse=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(replace). - - I(reuse) is only considered when I(disposition=NEW) + - I(reuse) is only considered when I(disposition=new) type: bool default: false replace: description: - - Determines if a data set should be replaced if I(disposition=NEW) and a data set with a matching name already exists. 
+ - Determines if a data set should be replaced if I(disposition=new) and a data set with a matching name already exists. - If I(replace=true), the original data set will be deleted, and a new data set created. - If I(replace=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(reuse). - - I(replace) is only considered when I(disposition=NEW) + - I(replace) is only considered when I(disposition=new) - I(replace) will result in loss of all data in the original data set unless I(backup) is specified. type: bool default: false backup: description: - - Determines if a backup should be made of an existing data set when I(disposition=NEW), I(replace=true), + - Determines if a backup should be made of an existing data set when I(disposition=new), I(replace=true), and a data set with the desired name is found. - I(backup) is only used when I(replace=true). type: bool @@ -174,12 +170,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - TRK - - CYL - - B - - K - - M - - G + - trk + - cyl + - b + - k + - m + - g space_primary: description: - The primary amount of space to allocate for a new data set. @@ -260,8 +256,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD1 on z/OS. type: str required: true @@ -289,8 +285,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD2 on z/OS. type: str required: true @@ -316,7 +312,7 @@ - The logical record length. (e.g C(80)). 
- For variable data sets, the length must include the 4-byte prefix area. - "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0." - - Valid values are (1-32760 for non-vsam, 1-32761 for vsam). + - Valid values are (1-32760 for non-VSAM, 1-32761 for VSAM). - Maps to LRECL on z/OS. type: int required: false @@ -325,11 +321,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -505,11 +501,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -717,16 +713,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - LIBRARY - - PDS - - PDSE - - LARGE - - BASIC - - SEQ - - RRDS - - ESDS - - LDS - - KSDS + - library + - pds + - pdse + - large + - basic + - seq + - rrds + - esds + - lds + - ksds disposition: description: - I(disposition) indicates the status of a data set. @@ -746,9 +742,7 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog disposition_abnormal: description: @@ -759,32 +753,30 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog reuse: description: - - Determines if data set should be reused if I(disposition=NEW) and a data set with matching name already exists. + - Determines if data set should be reused if I(disposition=new) and a data set with matching name already exists. - If I(reuse=true), I(disposition) will be automatically switched to C(SHR). - If I(reuse=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(replace). 
- - I(reuse) is only considered when I(disposition=NEW) + - I(reuse) is only considered when I(disposition=new) type: bool default: false replace: description: - - Determines if data set should be replaced if I(disposition=NEW) and a data set with matching name already exists. + - Determines if data set should be replaced if I(disposition=new) and a data set with matching name already exists. - If I(replace=true), the original data set will be deleted, and a new data set created. - If I(replace=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(reuse). - - I(replace) is only considered when I(disposition=NEW) + - I(replace) is only considered when I(disposition=new) - I(replace) will result in loss of all data in the original data set unless I(backup) is specified. type: bool default: false backup: description: - - Determines if a backup should be made of existing data set when I(disposition=NEW), I(replace=true), + - Determines if a backup should be made of existing data set when I(disposition=new), I(replace=true), and a data set with the desired name is found. - I(backup) is only used when I(replace=true). type: bool @@ -795,12 +787,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - TRK - - CYL - - B - - K - - M - - G + - trk + - cyl + - b + - k + - m + - g space_primary: description: - The primary amount of space to allocate for a new data set. @@ -881,8 +873,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD1 on z/OS. type: str required: true @@ -910,8 +902,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. 
- - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD2 on z/OS. type: str required: true @@ -946,11 +938,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -988,7 +980,7 @@ path: description: - The path to an existing UNIX file. - - Or provide the path to an new created UNIX file when I(status_group=OCREAT). + - Or provide the path to an new created UNIX file when I(status_group=ocreat). - The provided path must be absolute. required: true type: str @@ -1124,11 +1116,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. 
@@ -1300,13 +1292,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1324,13 +1316,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1369,13 +1361,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1398,15 +1390,15 @@ disposition: new replace: yes backup: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - "111111" - "SCR002" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1641,13 +1633,13 @@ def run_module(): disposition=dict(type="str", choices=["new", "shr", "mod", "old"]), disposition_normal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), disposition_abnormal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), - space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), + space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type="raw"), @@ -1660,16 +1652,16 @@ def run_module(): type=dict( type="str", choices=[ - "LIBRARY", - "PDS", - "PDSE", - "SEQ", - "BASIC", - "LARGE", - "KSDS", - "RRDS", - "LDS", - "ESDS", + "library", + "pds", + "pdse", 
+ "seq", + "basic", + "large", + "ksds", + "rrds", + "lds", + "esds", ], ), encryption_key_1=dict( @@ -1691,7 +1683,7 @@ def run_module(): key_length=dict(type="int", no_log=False), key_offset=dict(type="int", no_log=False), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -1766,7 +1758,7 @@ def run_module(): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -1884,13 +1876,13 @@ def parse_and_validate_args(params): disposition=dict(type="str", choices=["new", "shr", "mod", "old"]), disposition_normal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), disposition_abnormal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), - space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), + space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type=volumes), @@ -1903,16 +1895,16 @@ def parse_and_validate_args(params): type=dict( type="str", choices=[ - "LIBRARY", - "PDS", - "PDSE", - "SEQ", - "BASIC", - "LARGE", - "KSDS", - "RRDS", - "LDS", - "ESDS", + "library", + "pds", + "pdse", + "seq", + "basic", + "large", + "ksds", + "rrds", + "lds", + "esds", ], ), encryption_key_1=dict( @@ -1936,7 +1928,7 @@ def parse_and_validate_args(params): type=key_offset, default=key_offset_default, dependencies=["type"] ), record_length=dict(type="int"), - record_format=dict(type="str", 
choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -1992,7 +1984,7 @@ def parse_and_validate_args(params): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -2084,8 +2076,8 @@ def key_length(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "KSDS": - raise ValueError('key_length is only valid when "type=KSDS".') + if contents is not None and dependencies.get("type") != "ksds": + raise ValueError('key_length is only valid when "type=ksds".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( 'Invalid argument "{0}" for type "key_length".'.format(str(contents)) @@ -2105,8 +2097,8 @@ def key_offset(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "KSDS": - raise ValueError('key_offset is only valid when "type=KSDS".') + if contents is not None and dependencies.get("type") != "ksds": + raise ValueError('key_offset is only valid when "type=ksds".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( @@ -2127,9 +2119,9 @@ def key_length_default(contents, dependencies): """ KEY_LENGTH = 5 length = None - if contents is None and dependencies.get("type") == "KSDS": + if contents is None and dependencies.get("type") == "ksds": length = KEY_LENGTH - elif dependencies.get("type") == "KSDS": + elif dependencies.get("type") == "ksds": length = contents return length @@ -2145,9 +2137,9 @@ def key_offset_default(contents, dependencies): """ KEY_OFFSET = 0 offset = None - if contents is None and dependencies.get("type") == "KSDS": + if contents is None and dependencies.get("type") == 
"ksds": offset = KEY_OFFSET - elif dependencies.get("type") == "KSDS": + elif dependencies.get("type") == "ksds": offset = contents return offset diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index aa315b3fb..31d709a3a 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -181,11 +181,11 @@ - Organization of the destination type: str required: false - default: SEQ + default: seq choices: - - SEQ - - PDS - - PDSE + - seq + - pds + - pdse space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -204,28 +204,28 @@ description: - If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false record_format: description: - If the destination data set does not exist, this sets the format of the - data set. (e.g C(FB)) - - Choices are case-insensitive. + data set. (e.g C(fb)) + - Choices are case-sensitive. required: false choices: - - FB - - VB - - FBA - - VBA - - U + - fb + - vb + - fba + - vba + - u type: str record_length: description: @@ -249,15 +249,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). 
+ - I(key_length) should only be provided when I(type=ksds) type: int required: false sms_storage_class: @@ -695,11 +695,11 @@ def _create_dest_data_set( temp_ds = datasets.tmp_name(high_level_qualifier=hlq) arguments.update(name=temp_ds) if record_format is None: - arguments.update(record_format="FB") + arguments.update(record_format="fb") if record_length is None: arguments.update(record_length=80) if type is None: - arguments.update(type="SEQ") + arguments.update(type="seq") if space_primary is None: arguments.update(space_primary=self._compute_dest_data_set_size()) arguments.pop("self") @@ -802,8 +802,8 @@ def extract_src(self): temp_ds, rc = self._create_dest_data_set(**self.dest_data_set) rc = self.unpack(self.src, temp_ds) else: - temp_ds, rc = self._create_dest_data_set(type="SEQ", - record_format="U", + temp_ds, rc = self._create_dest_data_set(type="seq", + record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True) @@ -823,7 +823,7 @@ def _list_content(self, source): self._get_restored_datasets(out) def list_archive_content(self): - temp_ds, rc = self._create_dest_data_set(type="SEQ", record_format="U", record_length=0, tmp_hlq=self.tmphlq, replace=True) + temp_ds, rc = self._create_dest_data_set(type="seq", record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True) self.unpack(self.src, temp_ds) self._list_content(temp_ds) datasets.delete(temp_ds) @@ -1026,9 +1026,9 @@ def run_module(): ), type=dict( type='str', - choices=['SEQ', 'PDS', 'PDSE'], + choices=['seq', 'pds', 'pdse'], required=False, - default='SEQ', + default='seq', ), space_primary=dict( type='int', required=False), @@ -1036,12 +1036,12 @@ def run_module(): type='int', required=False), space_type=dict( type='str', - choices=['K', 'M', 'G', 'CYL', 'TRK'], + choices=['k', 'm', 'g', 'cyl', 'trk'], required=False, ), record_format=dict( type='str', - choices=["FB", "VB", "FBA", "VBA", "U"], + choices=["fb", "vb", "fba", "vba", "u"], required=False ), 
record_length=dict(type='int', required=False), @@ -1107,7 +1107,7 @@ def run_module(): required=False, options=dict( name=dict(arg_type='str', required=False), - type=dict(arg_type='str', required=False, default="SEQ"), + type=dict(arg_type='str', required=False, default="seq"), space_primary=dict(arg_type='int', required=False), space_secondary=dict( arg_type='int', required=False), diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index a9bfd658c..f6b1140fa 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -336,16 +336,16 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB"], + "record_format", ["fb", "vb"], ) def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -366,7 +366,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -375,7 +375,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: 
test_line = "a" * record_length @@ -388,7 +388,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -415,16 +415,16 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB"], + "record_format", ["fb", "vb"], ) def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -445,7 +445,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -454,7 +454,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -468,7 +468,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data format_dict = dict(name=format) format_dict["format_options"] = dict(use_adrdssu=True) if format == 
"terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -495,9 +495,9 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]), ] ) def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set): @@ -514,11 +514,11 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d name=src_data_set, type=data_set.get("dstype"), state="present", - record_format="FB", + record_format="fb", replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -536,7 +536,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -566,9 +566,9 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): @@ -582,7 +582,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, 
data_set): n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -600,7 +600,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src="{0}*".format(src_data_set), @@ -629,9 +629,9 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set): @@ -645,7 +645,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -663,7 +663,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) exclude = "{0}1".format(src_data_set) archive_result = hosts.all.zos_archive( @@ -697,9 +697,9 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - 
dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set): @@ -713,7 +713,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -731,7 +731,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src="{0}*".format(src_data_set), @@ -762,9 +762,9 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set): @@ -778,7 +778,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -801,7 +801,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") 
format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, @@ -836,9 +836,9 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set): @@ -858,7 +858,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -876,7 +876,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index a35750b63..ca7ef740a 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -307,16 +307,16 @@ def test_backup_and_restore_of_data_set( @pytest.mark.parametrize( "backup_name,space,space_type", [ - (DATA_SET_BACKUP_LOCATION, 10, "M"), - (DATA_SET_BACKUP_LOCATION, 10000, "K"), + (DATA_SET_BACKUP_LOCATION, 10, "m"), + (DATA_SET_BACKUP_LOCATION, 10000, "k"), (DATA_SET_BACKUP_LOCATION, 10, None), - 
(DATA_SET_BACKUP_LOCATION, 2, "CYL"), - (DATA_SET_BACKUP_LOCATION, 10, "TRK"), - (UNIX_BACKUP_LOCATION, 10, "M"), - (UNIX_BACKUP_LOCATION, 10000, "K"), + (DATA_SET_BACKUP_LOCATION, 2, "cyl"), + (DATA_SET_BACKUP_LOCATION, 10, "trk"), + (UNIX_BACKUP_LOCATION, 10, "m"), + (UNIX_BACKUP_LOCATION, 10000, "k"), (UNIX_BACKUP_LOCATION, 10, None), - (UNIX_BACKUP_LOCATION, 2, "CYL"), - (UNIX_BACKUP_LOCATION, 10, "TRK"), + (UNIX_BACKUP_LOCATION, 2, "cyl"), + (UNIX_BACKUP_LOCATION, 10, "trk"), ], ) def test_backup_and_restore_of_data_set_various_space_measurements( @@ -693,7 +693,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # backup_name=DATA_SET_BACKUP_LOCATION, # overwrite=True, # space=500, -# space_type="M", +# space_type="m", # ) # assert_module_did_not_fail(results) # assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) @@ -706,7 +706,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # full_volume=True, # sms_storage_class="DB2SMS10", # space=500, -# space_type="M", +# space_type="m", # ) # assert_module_did_not_fail(results) # assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 197bc9fa3..508a2ce8d 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -428,10 +428,10 @@ ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] # supported data set types -DS_TYPE = ['SEQ', 'PDS', 'PDSE'] +DS_TYPE = ['seq', 'pds', 'pdse'] # not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] +NS_DS_TYPE = ['esds', 'rrds', 'lds'] USS_BACKUP_FILE = "/tmp/backup.tmp" BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] @@ -450,7 +450,7 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT) hosts = ansible_zos_module hosts.all.shell(cmd="echo 
\"{0}\" > {1}".format(CONTENT, TEMP_FILE)) hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) - if DS_TYPE in ["PDS", "PDSE"]: + if DS_TYPE in ["pds", "pdse"]: DS_FULL_NAME = DS_NAME + "(MEM)" hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) @@ -1138,7 +1138,7 @@ def test_ds_block_absent(ansible_zos_module, dstype): def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module - ds_type = "SEQ" + ds_type = "seq" params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") kwargs = dict(backup_name=r"TMPHLQ\..") content = TEST_CONTENT @@ -1228,7 +1228,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT - if ds_type == "SEQ": + if ds_type == "seq": params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: params["path"] = default_data_set_name+"({0})".format(MEMBER_2) @@ -1245,7 +1245,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): ] ) # write memeber to verify cases - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) else: cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) @@ -1321,7 +1321,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) hosts.all.zos_data_set(name=ds_name, type=ds_type) - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") cmdStr = "cp -CM {0} 
\"//'{1}'\"".format(quote(temp_file), ds_full_name) @@ -1360,7 +1360,7 @@ def test_not_exist_ds_block_insertafter_regex(ansible_zos_module): @pytest.mark.ds def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): hosts = ansible_zos_module - ds_type = 'SEQ' + ds_type = 'seq' params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' ds_name = get_tmp_ds_name() @@ -1413,7 +1413,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) +@pytest.mark.parametrize("dstype", ["pds","pdse"]) def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 6e6a9a073..13e6d367b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -260,7 +260,7 @@ def populate_partitioned_data_set(hosts, name, ds_type, members=None): Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the data set. - ds_type (str) -- Type of the data set (either PDS or PDSE). + ds_type (str) -- Type of the data set (either pds or pdse). members (list, optional) -- List of member names to create. """ if not members: @@ -282,9 +282,9 @@ def get_listcat_information(hosts, name, ds_type): Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the data set. - ds_type (str) -- Type of data set ("SEQ", "PDS", "PDSE", "KSDS"). + ds_type (str) -- Type of data set ("seq", "pds", "pdse", "ksds"). 
""" - if ds_type.upper() == "KSDS": + if ds_type == "ksds": idcams_input = " LISTCAT ENT('{0}') DATA ALL".format(name) else: idcams_input = " LISTCAT ENTRIES('{0}')".format(name) @@ -311,7 +311,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the VSAM data set. - type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + type (str) -- Type of the VSAM (ksds, esds, rrds, lds) add_data (bool, optional) -- Whether to add records to the VSAM. key_length (int, optional) -- Key length (only for KSDS data sets). key_offset (int, optional) -- Key offset (only for KSDS data sets). @@ -321,7 +321,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, type=ds_type, state="present" ) - if ds_type == "KSDS": + if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -370,7 +370,7 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo # Submit link JCL. 
job_result = hosts.all.zos_job_submit( src="/tmp/link.jcl", - location="USS", + location="uss", wait_time_s=60 ) for result in job_result.contacted.values(): @@ -1690,7 +1690,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="SEQ", + type="seq", replace=True ) @@ -1739,7 +1739,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="SEQ", + type="seq", replace=True ) @@ -1790,7 +1790,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1840,7 +1840,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1890,8 +1890,8 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="SEQ", - record_format="FBA", + type="seq", + record_format="fba", record_length=80, block_size=27920, replace=True @@ -1966,13 +1966,13 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"]) +@pytest.mark.parametrize("ds_type", [ "pds", "pdse", "seq"]) def test_copy_dest_lock(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set_1 = get_tmp_ds_name() data_set_2 = get_tmp_ds_name() member_1 = "MEM1" - if ds_type == "PDS" or ds_type == "PDSE": + if ds_type == "pds" or ds_type == "pdse": src_data_set = data_set_1 + "({0})".format(member_1) dest_data_set = data_set_2 + "({0})".format(member_1) else: @@ -1982,9 +1982,9 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts = ansible_zos_module hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, 
state="present", type=ds_type, replace=True) - if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) + if ds_type == "pds" or ds_type == "pdse": + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2272,7 +2272,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2300,7 +2300,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2438,7 +2438,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2464,7 +2464,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, 
type="SEQ", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2495,7 +2495,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2526,7 +2526,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2571,10 +2571,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=80, replace=True ) @@ -2617,14 +2617,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") + hosts.all.zos_data_set(name=dest, type="member", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2653,31 +2653,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse 
@pytest.mark.parametrize("args", [ - dict(type="SEQ", is_binary=False), - dict(type="SEQ", is_binary=True), - dict(type="PDS", is_binary=False), - dict(type="PDS", is_binary=True), - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True) + dict(type="seq", is_binary=False), + dict(type="seq", is_binary=True), + dict(type="pds", is_binary=False), + dict(type="pds", is_binary=True), + dict(type="pdse", is_binary=False), + dict(type="pdse", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "SEQ": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2700,32 +2700,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="SEQ", force=False), - dict(type="SEQ", force=True), - dict(type="PDS", force=False), - dict(type="PDS", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True) + dict(type="seq", force=False), + dict(type="seq", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", 
force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "SEQ": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) - hosts.all.zos_data_set(name=dest, type="MEMBER") + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest, type="member") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2844,7 +2844,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2859,8 +2859,8 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): name=dest, type=src_type, space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=80, ) @@ -2883,18 +2883,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = 
ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "SEQ" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "SEQ": - hosts.all.zos_data_set(name=src, type="MEMBER") + if src_type != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2924,10 +2924,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="PDS", dest_type="PDS"), - dict(src_type="PDS", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDS"), - dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="pds", dest_type="pds"), + dict(src_type="pds", dest_type="pdse"), + dict(src_type="pdse", dest_type="pds"), + dict(src_type="pdse", dest_type="pdse"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2979,9 +2979,9 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -2990,12 +2990,12 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3012,24 +3012,24 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + 
record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) # pre-allocate dest loadlib to copy over with an alias. hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3117,20 +3117,20 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3138,23 +3138,23 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3267,9 +3267,9 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3278,12 +3278,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + 
type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3306,24 +3306,24 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) # allocate dest loadlib to copy over with an alias. hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3337,12 +3337,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): executable=True, aliases=False, dest_data_set={ - 'type': "LIBRARY", - 'record_format': "U", + 'type': "library", + 'record_format': "u", 'record_length': 0, 'block_size': 32760, 'space_primary': 2, - 'space_type': "M", + 'space_type': "m", } ) # copy src loadlib to dest library pds w aliases @@ -3353,12 +3353,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): executable=True, aliases=True, dest_data_set={ - 'type': "LIBRARY", - 'record_format': "U", + 'type': "library", + 'record_format': "u", 'record_length': 0, 'block_size': 32760, 'space_primary': 2, - 'space_type': "M", + 'space_type': "m", } ) @@ -3459,9 +3459,9 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3470,12 +3470,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + 
type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3530,12 +3530,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3548,12 +3548,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): executable=True, aliases=False, dest_data_set={ - 'type': "PDSE", - 'record_format': "U", + 'type': "pdse", + 'record_format': "u", 'record_length': 0, 'block_size': 32760, 'space_primary': 2, - 'space_type': "M", + 'space_type': "m", } ) else: @@ -3621,9 +3621,9 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3632,12 +3632,12 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3657,24 +3657,24 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) # allocate dest loadlib to copy over with an alias. 
hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3833,12 +3833,12 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) copy_uss_to_mvs_res = hosts.all.zos_copy( @@ -3884,7 +3884,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -3920,8 +3920,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDS") - hosts.all.zos_data_set(name=dest, type="PDS") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for member in ds_list: hosts.all.shell( @@ -3966,8 +3966,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDS") - hosts.all.zos_data_set(name=dest, type="PDS") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for src_member in src_ds_list: hosts.all.shell( @@ -4000,7 +4000,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4038,10 +4038,10 @@ def 
test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="PDS", force=False), - dict(ds_type="PDS", force=True), - dict(ds_type="PDSE", force=False), - dict(ds_type="PDSE", force=True) + dict(ds_type="pds", force=False), + dict(ds_type="pds", force=True), + dict(ds_type="pdse", force=False), + dict(ds_type="pdse", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4085,7 +4085,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4130,7 +4130,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4176,7 +4176,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4212,10 +4212,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDS", force=False), - dict(type="PDS", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), + dict(type="pds", force=False), + dict(type="pds", 
force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4224,7 +4224,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4257,7 +4257,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4267,8 +4267,8 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts.all.zos_data_set( type=dest_type, space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=25, ) @@ -4300,10 +4300,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDS", backup=None), - dict(type="PDS", backup="USER.TEST.PDS.BACKUP"), - dict(type="PDSE", backup=None), - dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), + dict(type="pds", backup=None), + dict(type="pds", backup="USER.TEST.PDS.BACKUP"), + dict(type="pdse", backup=None), + dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4349,7 +4349,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def 
test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4365,8 +4365,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "SEQ": - hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') + if src_type != "seq": + hosts.all.zos_data_set(name=source_member, type="member", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4425,8 +4425,8 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): dest_ds = get_tmp_ds_name() try: - create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) - create_vsam_data_set(hosts, dest_ds, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, dest_ds, "ksds", add_data=True, key_length=12, key_offset=0) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, force=force) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") @@ -4461,8 +4461,8 @@ def test_backup_ksds(ansible_zos_module, backup): backup_name = None try: - create_vsam_data_set(hosts, src, "KSDS", add_data=True, key_length=12, key_offset=0) - create_vsam_data_set(hosts, dest, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, src, "ksds", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, dest, "ksds", add_data=True, key_length=12, key_offset=0) if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup, remote_src=True, force=True) @@ -4544,8 +4544,8 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): volume = volumes.get_available_vol() space_primary = 3 space_secondary = 2 - space_type = "K" - record_format = "VB" + space_type = "k" + 
record_format = "vb" record_length = 100 block_size = 21000 @@ -4556,7 +4556,7 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): remote_src=True, volume=volume, dest_data_set=dict( - type="SEQ", + type="seq", space_primary=space_primary, space_secondary=space_secondary, space_type=space_type, @@ -4587,7 +4587,7 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): assert len(output_lines) == 5 data_set_attributes = output_lines[2].strip().split() assert len(data_set_attributes) == 4 - assert data_set_attributes[0] == record_format + assert data_set_attributes[0] == record_format.upper() assert data_set_attributes[1] == str(record_length) assert data_set_attributes[2] == str(block_size) assert data_set_attributes[3] == "PS" @@ -4637,7 +4637,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 7ab4685c0..f96bfabdc 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -29,12 +29,12 @@ data_set_types = [ - ("PDS"), - ("SEQ"), - ("PDSE"), - ("ESDS"), - ("RRDS"), - ("LDS"), + ("pds"), + ("seq"), + ("pdse"), + ("esds"), + ("rrds"), + ("lds"), ] TEMP_PATH = "/tmp/jcl" @@ -161,7 +161,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = 
hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 ) # verify data set creation was successful @@ -220,7 +220,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS" + src=TEMP_PATH + "/SAMPLE", location="uss" ) # verify data set creation was successful for result in results.contacted.values(): @@ -266,7 +266,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS" + src=TEMP_PATH + "/SAMPLE", location="uss" ) # verify data set creation was successful for result in results.contacted.values(): @@ -314,7 +314,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS" + src=TEMP_PATH + "/SAMPLE", location="uss" ) # verify data set creation was successful for result in results.contacted.values(): @@ -351,7 +351,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss") # verify data set creation was successful for result in 
results.contacted.values(): @@ -366,7 +366,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss") # verify data set creation was successful for result in results.contacted.values(): @@ -469,7 +469,7 @@ def test_batch_data_set_creation_and_deletion(ansible_zos_module): results = hosts.all.zos_data_set( batch=[ {"name": dataset, "state": "absent"}, - {"name": dataset, "type": "PDS", "state": "present"}, + {"name": dataset, "type": "pds", "state": "present"}, {"name": dataset, "state": "absent"}, ] ) @@ -486,11 +486,11 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": dataset, "type": "PDS", "directory_blocks": 5}, - {"name": dataset + "(newmem1)", "type": "MEMBER"}, + {"name": dataset, "type": "pds", "directory_blocks": 5}, + {"name": dataset + "(newmem1)", "type": "member"}, { "name": dataset + "(newmem2)", - "type": "MEMBER", + "type": "member", "state": "present", }, {"name": dataset, "state": "absent"}, @@ -534,7 +534,7 @@ def test_data_member_force_delete(ansible_zos_module): DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) # set up: # create pdse - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="PDSE", replace=True) + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) for result in results.contacted.values(): assert result.get("changed") is True @@ -543,25 +543,25 @@ def test_data_member_force_delete(ansible_zos_module): batch=[ { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "MEMBER", + "type": "member", "state": 
"present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, @@ -590,7 +590,7 @@ def test_data_member_force_delete(ansible_zos_module): results = hosts.all.zos_data_set( name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_2), state="absent", - type="MEMBER" + type="member" ) for result in results.contacted.values(): assert result.get("failed") is True @@ -598,7 +598,7 @@ def test_data_member_force_delete(ansible_zos_module): # attempt to delete MEMBER_3 with force option. results = hosts.all.zos_data_set( - name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="MEMBER", force=True + name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="member", force=True ) for result in results.contacted.values(): assert result.get("changed") is True @@ -610,7 +610,7 @@ def test_data_member_force_delete(ansible_zos_module): { "name": "{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4), "state": "absent", - "type": "MEMBER", + "type": "member", "force": True } ] @@ -647,9 +647,9 @@ def test_repeated_operations(ansible_zos_module): DEFAULT_DATA_SET_NAME_WITH_MEMBER = DEFAULT_DATA_SET_NAME + "(MEM)" results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="PDS", + type="pds", space_primary=5, - space_type="CYL", + space_type="cyl", record_length=15, replace=True, ) @@ -660,7 +660,7 @@ def test_repeated_operations(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="PDS", + type="pds", replace=True, ) @@ -669,7 +669,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") 
is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", replace=True + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", replace=True ) for result in results.contacted.values(): @@ -677,7 +677,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER" + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member" ) for result in results.contacted.values(): @@ -685,7 +685,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", state="absent" + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent" ) for result in results.contacted.values(): @@ -693,7 +693,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", state="absent" + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent" ) for result in results.contacted.values(): @@ -713,9 +713,9 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="SEQ", + type="seq", space_primary=5, - space_type="CYL", + space_type="cyl", record_length=15, volumes=[volume_1, volume_2], ) @@ -750,11 +750,11 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="KSDS", + type="ksds", key_length=5, key_offset=0, space_primary=5, - space_type="CYL", + space_type="cyl", volumes=[volume_1, volume_2], ) for result in results.contacted.values(): @@ -843,7 
+843,7 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): @pytest.mark.parametrize( "filesystem", - ["HFS", "ZFS"], + ["hfs", "zfs"], ) def test_filesystem_create_and_mount(ansible_zos_module, filesystem): fulltest = True @@ -852,7 +852,7 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): try: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - if filesystem == "HFS": + if filesystem == "hfs": result0 = hosts.all.shell(cmd="zinfo -t sys") for result in result0.contacted.values(): sys_info = result.get("stdout_lines") @@ -909,7 +909,7 @@ def test_data_set_creation_zero_values(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", - type="KSDS", + type="ksds", replace=True, space_primary=5, space_secondary=0, @@ -941,7 +941,7 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): @pytest.mark.parametrize( "formats", - ["F","FB", "VB", "FBA", "VBA", "U"], + ["f","fb", "vb", "fba", "vba", "u"], ) def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): volumes = Volume_Handler(volumes_on_systems) @@ -955,7 +955,7 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): state="present", format=formats, space_primary="5", - space_type="M", + space_type="m", volume=volume_1, ) for result in results.contacted.values(): diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index e017450ff..4b74c8834 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -98,7 +98,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, type=ds_type, state="present" ) - if ds_type == "KSDS": + if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -545,7 +545,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > 
{1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) for result in results.contacted.values(): @@ -576,7 +576,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): hosts = ansible_zos_module mlq_size = 3 MVS_VS = get_tmp_ds_name(mlq_size) - create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) hosts.all.file(path=USS_DEST_FILE, state="touch") results = hosts.all.zos_encode( src=MVS_VS, @@ -611,7 +611,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): hosts = ansible_zos_module MVS_PS = get_tmp_ds_name() MVS_VS = get_tmp_ds_name() - create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_encode( @@ -635,7 +635,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module MVS_VS = get_tmp_ds_name() MVS_PDS = get_tmp_ds_name() - create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) MVS_PDS_MEMBER = MVS_PDS + '(MEM)' hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) hosts.all.zos_data_set( @@ -671,7 +671,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) 
results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -803,7 +803,7 @@ def test_vsam_backup(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) hosts.all.file(path=TEMP_JCL_PATH, state="absent") # submit JCL to populate KSDS @@ -814,7 +814,7 @@ def test_vsam_backup(ansible_zos_module): ) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) hosts.all.zos_encode( diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b239bbbd9..5b8e7f878 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -89,8 +89,8 @@ def extract_member_name(data_set): def create_and_populate_test_ps_vb(ansible_zos_module, name): params=dict( name=name, - type='SEQ', - record_format='VB', + type='seq', + record_format='vb', record_length='3180', block_size='3190' ) @@ -112,7 +112,7 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None) Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the VSAM data set. - type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + type (str) -- Type of the VSAM (ksds, esds, rrds, lds) add_data (bool, optional) -- Whether to add records to the VSAM. key_length (int, optional) -- Key length (only for KSDS data sets). key_offset (int, optional) -- Key offset (only for KSDS data sets). 
@@ -122,7 +122,7 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None) type=ds_type, state="present" ) - if ds_type == "KSDS": + if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -188,7 +188,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS @@ -229,7 +229,7 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module TEST_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -264,7 +264,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(temp_jcl_path), location="uss", wait_time_s=30 ) hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( @@ -300,7 +300,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module src_ds = "TEST.VSAM.DATA" - 
create_vsam_data_set(hosts, src_ds, "KSDS", key_length=12, key_offset=0) + create_vsam_data_set(hosts, src_ds, "ksds", key_length=12, key_offset=0) params = dict(src=src_ds, dest="/tmp/", flat=True) dest_path = "/tmp/" + src_ds try: @@ -347,7 +347,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS @@ -368,7 +368,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module TEST_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -417,7 +417,7 @@ def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): name=pds_name, type="pds", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=25, ) @@ -438,12 +438,12 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): name=pds_name, type="pds", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=25, ) hosts.all.zos_data_set(name=pds_name, type="pds") - hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes") + hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") params = 
dict(src=pds_name + "(MYDATA)", dest="/tmp/", flat=True) dest_path = "/tmp/MYDATA" try: @@ -535,7 +535,7 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) @@ -566,11 +566,11 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) name=pds_name, type="pds", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=25, ) - hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes") + hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") os.mkdir(dest_path) with open(full_path, "w") as infile: infile.write(DUMMY_DATA) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 3a30d9510..37a67ddbc 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -124,7 +124,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): batch=[ dict( name=i + "(MEMBER)", - type="MEMBER", + type="member", state='present', replace='yes' ) for i in PDS_NAMES @@ -185,10 +185,10 @@ def test_exclude_members_from_matched_list(ansible_zos_module): batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + 
"(FILE)", type="member") for i in PDS_NAMES] ) find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] @@ -221,8 +221,8 @@ def test_find_data_sets_larger_than_size(ansible_zos_module): TEST_PS1 = 'TEST.PS.ONE' TEST_PS2 = 'TEST.PS.TWO' try: - res = hosts.all.zos_data_set(name=TEST_PS1, state="present", size="5m") - res = hosts.all.zos_data_set(name=TEST_PS2, state="present", size="5m") + res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_type="m", space_primary=5) + res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_type="m", space_primary=5) find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 @@ -236,7 +236,7 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module TEST_PS = 'USER.FIND.TEST' try: - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="k", space_primary=1) find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -344,10 +344,10 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES] ) find_res = hosts.all.zos_find( pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 
584cd6d6d..e92d377d4 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -99,7 +99,7 @@ def test_zos_job_output_job_exists(ansible_zos_module): ) jobs = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None ) for job in jobs.contacted.values(): print(job) @@ -127,7 +127,7 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) result = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 8f6c6e072..11680ab57 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -57,13 +57,13 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=JDATA_SET_NAME, state="present", type="PDS", replace=True + name=JDATA_SET_NAME, state="present", type="pds", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait_time_s=10 + src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="data_set", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -90,13 +90,13 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) 
) hosts.all.zos_data_set( - name=NDATA_SET_NAME, state="present", type="PDS", replace=True + name=NDATA_SET_NAME, state="present", type="pds", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait_time_s=10 + src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="data_set", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index c148b6223..f2f1582fa 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -411,8 +411,8 @@ def test_job_submit_PDS(ansible_zos_module, location): """ Test zos_job_submit with a PDS(MEMBER), also test the default value for 'location', ensure it works with and without the - value "DATA_SET". If default_location is True, then don't - pass a 'location:DATA_SET' allow its default to come through. + value "data_set". If default_location is True, then don't + pass a 'location:data_set' allow its default to come through. 
""" try: results = None @@ -424,7 +424,7 @@ def test_job_submit_PDS(ansible_zos_module, location): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -436,7 +436,7 @@ def test_job_submit_PDS(ansible_zos_module, location): ) else: results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait_time_s=30 + src="{0}(SAMPLE)".format(data_set_name), location="data_set", wait_time_s=30 ) for result in results.contacted.values(): @@ -456,7 +456,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="PDS", replace=True + name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( @@ -465,7 +465,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): ) results = hosts.all.zos_job_submit( src="{0}(SAMPLE)".format(DATA_SET_NAME_SPECIAL_CHARS), - location="DATA_SET", + location="data_set", ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -484,7 +484,7 @@ def test_job_submit_USS(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -499,7 +499,7 @@ def test_job_submit_LOCAL(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", 
wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): print(result) @@ -513,7 +513,7 @@ def test_job_submit_LOCAL_extraR(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -526,7 +526,7 @@ def test_job_submit_LOCAL_BADJCL(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_BAD) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): # Expecting: The job completion code (CC) was not in the job log....." 
@@ -547,7 +547,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True, volumes=volume_1 + name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 ) hosts.all.shell( @@ -555,10 +555,10 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="uncataloged", type="PDS" + name=data_set_name, state="uncataloged", type="pds" ) - results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) + results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="data_set", volume=volume_1) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 @@ -580,7 +580,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -589,7 +589,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="DATA_SET", wait_time_s=wait_time_s) + location="data_set", wait_time_s=wait_time_s) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -613,7 +613,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -622,7 +622,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - 
location="DATA_SET", wait_time_s=wait_time_s) + location="data_set", wait_time_s=wait_time_s) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -646,7 +646,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -655,7 +655,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="DATA_SET", wait_time_s=wait_time_s) + location="data_set", wait_time_s=wait_time_s) for result in results.contacted.values(): assert result.get("msg") is not None @@ -682,7 +682,7 @@ def test_job_submit_max_rc(ansible_zos_module, args): f.write(JCL_FILE_CONTENTS_RC_8) results = hosts.all.zos_job_submit( - src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] + src=tmp_file.name, location="local", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] ) for result in results.contacted.values(): @@ -771,7 +771,7 @@ def test_job_submit_jinja_template(ansible_zos_module, args): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="LOCAL", + location="local", use_template=True, template_parameters=args["options"] ) @@ -794,7 +794,7 @@ def test_job_submit_full_input(ansible_zos_module): ) results = hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_PATH), - location="USS", + location="uss", volume=None, # This job used to set wait=True, but since it has been deprecated # and removed, it now waits up to 30 seconds. 
@@ -814,7 +814,7 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="local") import pprint for result in results.contacted.values(): assert result.get("changed") is False @@ -827,7 +827,7 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local") for result in results.contacted.values(): assert result.get("changed") is False @@ -843,7 +843,7 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -864,7 +864,7 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -887,7 +887,7 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -908,7 +908,7 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", 
wait_time_s=20, encoding={ "from": "UTF-8", @@ -946,7 +946,7 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=15 ) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 445c0edfe..cd1421f41 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -224,7 +224,7 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT) hosts = ansible_zos_module hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) - if DS_TYPE in ["PDS", "PDSE"]: + if DS_TYPE in ["pds", "pdse"]: DS_FULL_NAME = DS_NAME + "(MEM)" hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) @@ -238,10 +238,11 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT) def remove_ds_environment(ansible_zos_module, DS_NAME): hosts = ansible_zos_module hosts.all.zos_data_set(name=DS_NAME, state="absent") + # supported data set types -DS_TYPE = ['SEQ', 'PDS', 'PDSE'] +DS_TYPE = ['seq', 'pds', 'pdse'] # not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] +NS_DS_TYPE = ['esds', 'rrds', 'lds'] # The encoding will be only use on a few test ENCODING = [ 'ISO8859-1', 'UTF-8'] @@ -793,7 +794,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module - ds_type = "SEQ" + ds_type = "seq" kwargs = dict(backup_name=r"TMPHLQ\..") params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") content = TEST_CONTENT @@ -848,7 +849,7 @@ def test_ds_line_force(ansible_zos_module, 
dstype): MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT - if ds_type == "SEQ": + if ds_type == "seq": params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: params["path"] = default_data_set_name+"({0})".format(MEMBER_2) @@ -865,7 +866,7 @@ def test_ds_line_force(ansible_zos_module, dstype): ] ) # write memeber to verify cases - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) else: cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) @@ -900,7 +901,7 @@ def test_ds_line_force(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) +@pytest.mark.parametrize("dstype", ["pds","pdse"]) def test_ds_line_force_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype @@ -1022,7 +1023,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") hosts.all.zos_data_set(name=ds_name, type=ds_type) - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 1ec7c03f5..39fdd26dd 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -89,7 +89,7 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems): srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="mounted" ) for result in 
mount_result.values(): assert result.get("rc") == 0 @@ -99,7 +99,7 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems): hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path="/pythonx/", state="absent") @@ -112,10 +112,10 @@ def test_double_mount(ansible_zos_module, volumes_on_systems): volume_1 = volumes.get_available_vol() srcfn = create_sourcefile(hosts, volume_1) try: - hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") + hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted") # The duplication here is intentional... want to make sure it is seen mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="mounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -125,7 +125,7 @@ def test_double_mount(ansible_zos_module, volumes_on_systems): hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path="/pythonx/", state="absent") @@ -137,9 +137,9 @@ def test_remount(ansible_zos_module, volumes_on_systems): volume_1 = volumes.get_available_vol() srcfn = create_sourcefile(hosts, volume_1) try: - hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") + hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted") mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="remounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="remounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -148,7 +148,7 @@ def test_remount(ansible_zos_module, volumes_on_systems): hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path="/pythonx/", state="absent") @@ -180,7 +180,7 @@ def 
test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ name=dest, type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -196,7 +196,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="mounted", persistent=dict(data_store=dest_path), ) @@ -209,7 +209,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path=tmp_file_filename, state="absent") @@ -219,7 +219,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ state="absent", type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -264,7 +264,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst name=dest, type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -283,7 +283,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="mounted", persistent=dict( data_store=dest_path, @@ -326,7 +326,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path=tmp_file_filename, state="absent") @@ -337,7 +337,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst state="absent", type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -349,7 +349,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) srcfn = 
create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="mounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -358,11 +358,11 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) finally: tmphlq = "TMPHLQ" persist_data_set = get_tmp_ds_name() - hosts.all.zos_data_set(name=persist_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=persist_data_set, state="present", type="seq") unmount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", tmp_hlq=tmphlq, persistent=dict(data_store=persist_data_set, backup=True) diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index ca5b6384d..cbddd4419 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -62,7 +62,7 @@ def test_disposition_new(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -86,7 +86,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -118,7 +118,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, default_volume = volumes.get_available_vol() default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( 
program_name="idcams", @@ -133,12 +133,12 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, return_content=dict(type="text"), replace=True, backup=True, - type="SEQ", + type="seq", space_primary=5, space_secondary=1, - space_type="M", + space_type="m", volumes=default_volume, - record_format="FB" + record_format="fb" ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -172,7 +172,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, disposition="new", - type="PDS", + type="pds", directory_blocks=15, return_content=dict(type="text"), ), @@ -197,7 +197,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=default_data_set, type="PDS", state="present", replace=True + name=default_data_set, type="pds", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,7 +234,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="SEQ", + type="seq", state="present", replace=True, volumes=[volume_1], @@ -267,11 +267,11 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch @pytest.mark.parametrize( "space_type,primary,secondary,expected", [ - ("TRK", 3, 1, 169992), - ("CYL", 3, 1, 2549880), - ("B", 3, 1, 56664), - ("K", 3, 1, 56664), - ("M", 3, 1, 3003192), + ("trk", 3, 1, 169992), + ("cyl", 3, 1, 2549880), + ("b", 3, 1, 56664), + ("k", 3, 1, 56664), + ("m", 3, 1, 3003192), ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): @@ -288,7 +288,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dd_name=SYSPRINT_DD, 
data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", space_primary=primary, space_secondary=secondary, space_type=space_type, @@ -315,7 +315,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte @pytest.mark.parametrize( "data_set_type", - ["PDS", "PDSE", "LARGE", "BASIC", "SEQ"], + ["pds", "pdse", "large", "basic", "seq"], ) def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -351,7 +351,7 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s @pytest.mark.parametrize( "data_set_type", - ["KSDS", "RRDS", "LDS", "ESDS"], + ["ksds", "rrds", "lds", "esds"], ) def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -374,7 +374,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste volumes=[volume_1], ), ) - if data_set_type != "KSDS" + if data_set_type != "ksds" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, @@ -400,7 +400,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste @pytest.mark.parametrize( "record_format", - ["U", "VB", "VBA", "FB", "FBA"], + ["u", "vb", "vba", "fb", "fba"], ) def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: @@ -453,7 +453,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="SEQ", + type="seq", state="present", replace=True, volumes=[volume_1], @@ -505,7 +505,7 @@ def test_return_text_content_encodings( default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="SEQ", + type="seq", state="present", replace=True, volumes=[volume_1], @@ -544,7 +544,7 @@ def test_reuse_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - 
name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -555,7 +555,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", reuse=True, return_content=dict(type="text"), ), @@ -577,7 +577,7 @@ def test_replace_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -588,7 +588,7 @@ def test_replace_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, return_content=dict(type="text"), ), @@ -619,7 +619,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, return_content=dict(type="text"), ), @@ -636,7 +636,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, backup=True, return_content=dict(type="text"), @@ -687,7 +687,7 @@ def test_input_empty(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -719,7 +719,7 @@ def test_input_large(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -752,7 +752,7 @@ def test_input_provided_as_list(ansible_zos_module): dd_name=SYSPRINT_DD, 
data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -792,7 +792,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", ), ), dict( @@ -844,7 +844,7 @@ def test_input_return_text_content_encodings( dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", ), ), dict( @@ -1164,7 +1164,7 @@ def test_file_record_length(ansible_zos_module, record_length): @pytest.mark.parametrize( "record_format", - ["U", "VB", "VBA", "FB", "FBA"], + ["u", "vb", "vba", "fb", "fba"], ) def test_file_record_format(ansible_zos_module, record_format): try: @@ -1353,7 +1353,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ) ), @@ -1361,7 +1361,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", ) ), ], @@ -1391,8 +1391,8 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu hosts = ansible_zos_module default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() - hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="SEQ") + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1405,7 +1405,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", 
replace=True, backup=True, return_content=dict(type="text"), @@ -1415,7 +1415,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", replace=True, backup=True, ) @@ -1462,7 +1462,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' - hosts.all.zos_data_set(name=default_data_set, state="present", type="PDS") + hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1482,7 +1482,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", ) ), ], @@ -1538,7 +1538,7 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", ) ), ], @@ -1766,7 +1766,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co try: hosts = ansible_zos_module default_data_set = "ANSIBLE.USER.PRIVATE.TEST" - hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 28cc0d77d..790f5b3ef 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -353,16 
+353,16 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["fb", "vb",], ) def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -382,7 +382,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec replace=True ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{DATASET}({member})", @@ -392,7 +392,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -405,13 +405,13 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") archive_result = hosts.all.zos_archive( src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, dest_data_set=dict(name=DATASET, - type="SEQ", + type="seq", record_format=record_format, record_length=record_length), ) @@ -464,16 +464,16 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", 
members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["fb", "vb",], ) def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -493,7 +493,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d replace=True ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{DATASET}({member})", @@ -503,7 +503,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -517,7 +517,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d format_dict = dict(name=format) format_dict["format_options"] = dict(use_adrdssu=True) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") archive_result = hosts.all.zos_archive( src=DATASET, dest=MVS_DEST_ARCHIVE, @@ -564,9 +564,9 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): @@ -580,7 
+580,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -598,10 +598,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -640,9 +640,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set): @@ -656,7 +656,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -674,10 +674,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + 
src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -726,9 +726,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set): @@ -742,7 +742,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -760,10 +760,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -808,9 +808,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set): @@ -824,7 +824,7 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: 
target_member_list.extend( @@ -842,10 +842,10 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -885,9 +885,9 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) @pytest.mark.parametrize( @@ -911,7 +911,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -929,10 +929,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -974,16 +974,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + 
dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["fb", "vb",], ) def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -1004,7 +1004,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da record_format=record_format, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{DATASET}({member})", @@ -1013,7 +1013,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -1027,7 +1027,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da format_dict = dict(name=format) format_dict["format_options"] = dict(use_adrdssu=True) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") archive_result = hosts.all.zos_archive( src=DATASET, dest=MVS_DEST_ARCHIVE, diff --git a/tests/unit/test_zos_backup_restore_unit.py b/tests/unit/test_zos_backup_restore_unit.py index a751a7599..5920febdb 100644 --- a/tests/unit/test_zos_backup_restore_unit.py +++ b/tests/unit/test_zos_backup_restore_unit.py @@ -93,7 +93,7 @@ def assert_args_invalid(zos_backup_restore, arguments): @pytest.mark.parametrize( - "space_type", ["K", "M", "G", "TRK", "CYL", "k", "m", "g", "trk", "cyl"] + "space_type", ["k", "m", "g", "trk", "cyl"] ) def test_valid_space_types(zos_backup_restore_mocker, 
space_type): valid_args = dict( diff --git a/tests/unit/test_zos_mvs_raw_unit.py b/tests/unit/test_zos_mvs_raw_unit.py index f528412da..e50734756 100644 --- a/tests/unit/test_zos_mvs_raw_unit.py +++ b/tests/unit/test_zos_mvs_raw_unit.py @@ -59,7 +59,7 @@ def run_command(self, *args, **kwargs): "new", "keep", "keep", - "CYL", + "cyl", 5, 1, "smsclas1", @@ -67,17 +67,17 @@ def run_command(self, *args, **kwargs): "smsclas1", 80, "SOMEKEYLAB100", - "LIBRARY", + "library", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "U", + "u", ), ( "data.set.name(mem1)", "shr", "delete", "keep", - "TRK", + "trk", "5", 1, "smsclas1", @@ -85,17 +85,17 @@ def run_command(self, *args, **kwargs): "smsclas3", 120, "somekeylab1", - "BASIC", + "basic", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "FB", + "fb", ), ( "DATA.NAME.HERE.NOW", "old", "catalog", "uncatalog", - "B", + "b", 55, "100", "SMSCLASS", @@ -103,17 +103,17 @@ def run_command(self, *args, **kwargs): "smscD@s3", 120, "keyfor342fdsme", - "LARGE", + "large", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "FBA", + "fba", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "G", + "g", 1, "9", "SMSCLASS", @@ -121,17 +121,17 @@ def run_command(self, *args, **kwargs): "", 120, "keyfor342fdsme", - "PDSE", + "pdse", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "VB", + "vb", ), ( "DAT$.now", "new", "delete", "keep", - "M", + "m", 1, 9, "SMSCLASS", @@ -139,10 +139,10 @@ def run_command(self, *args, **kwargs): "", 0, "", - "LDS", + "lds", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "VBA", + "vba", ), ], ) @@ -237,7 +237,7 @@ def test_argument_parsing_data_set( "delete", 0, 100, - "FB", + "fb", "record", "r", ["ocreat", "oappend", "onoctty"], @@ -248,14 +248,14 @@ def test_argument_parsing_data_set( "delete", 200, "100", - "FBA", + "fba", "record", "w", 
["oappend", "osync"], ), - ("/u/OEUSR01", "keep", "delete", 0, 100, "VB", "binary", "rw", ["ononblock"]), - ("/u/testmeee", "keep", "delete", 0, 100, "VBA", "record", "read_only", []), - ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "U", "text", "write_only", []), + ("/u/OEUSR01", "keep", "delete", 0, 100, "vb", "binary", "rw", ["ononblock"]), + ("/u/testmeee", "keep", "delete", 0, 100, "vba", "record", "read_only", []), + ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "u", "text", "write_only", []), ], ) def test_argument_parsing_unix( @@ -338,7 +338,7 @@ def test_argument_parsing_unix( "old", "keep", "keep", - "CYL", + "cyl", 5, 1, "smsclas1", @@ -346,17 +346,17 @@ def test_argument_parsing_unix( "smsclas1", 80, "SOMEKEYLAB100", - "LIBRARY", + "library", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "U", + "u", ), ( "data.set.name(mem1waytoolong)", "excl", "delete", "keep", - "TRK", + "trk", "5", 1, "smsclas1", @@ -364,10 +364,10 @@ def test_argument_parsing_unix( "smsclas3", 120, "somekeylab1", - "BASIC", + "basic", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "FB", + "fb", ), ( "DATA.NAME.HERE.NOW", @@ -382,17 +382,17 @@ def test_argument_parsing_unix( "smscD@s3", 120, "keyfor342fdsme", - "LARGE", + "large", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "FBA", + "fba", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "G", + "g", 1, "9", "SMSCLASSsss", @@ -400,17 +400,17 @@ def test_argument_parsing_unix( "", 120, "keyfor342fdsme", - "PDSE", + "pdse", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "VB", + "vb", ), ( "DAT$.now", "new", "delete", "meep", - "M", + "m", 1, 9, "SMSCLASS", @@ -418,10 +418,10 @@ def test_argument_parsing_unix( "", 0, "", - "KSDSS", + "ksdss", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "VBA", + "vba", ), ], ) @@ -525,7 +525,7 @@ def 
test_argument_parsing_data_set_failure_path( "delete", 200, "100", - "FBA", + "fba", "record", "w", ["append", "osync"], @@ -537,12 +537,12 @@ def test_argument_parsing_data_set_failure_path( "delete", 0, 100, - "VBA", + "vba", "record", "read_only", ["hello"], ), - ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "U", "text", None, []), + ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "u", "text", None, []), ], ) def test_argument_parsing_unix_failure_path( @@ -620,7 +620,7 @@ def test_ksds_defaults( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "KSDS", + "type": "ksds", } }, ], @@ -663,7 +663,7 @@ def test_ksds_exception_key_length( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ESDS", + "type": "esds", "key_length": 5, } }, @@ -693,7 +693,7 @@ def test_ksds_exception_key_offset( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ESDS", + "type": "esds", "key_offset": 5, } }, From 18486dfee3a4f3705f3a4013637a3751cdf326a8 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:08:02 -0600 Subject: [PATCH 341/413] [Documentation][zos_data_set] Add and standarize docstrings on modules/zos_data_set.py (#1347) * First advance to docstrings on modules/zos_data_set.py * Add and standarize docstrings on modules/zos_data_set.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Modified docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1347-update-docstring-zos_data_set.yml | 3 + plugins/modules/zos_data_set.py | 292 ++++++++++++++++-- 2 files changed, 272 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/1347-update-docstring-zos_data_set.yml diff --git a/changelogs/fragments/1347-update-docstring-zos_data_set.yml b/changelogs/fragments/1347-update-docstring-zos_data_set.yml new file mode 100644 index 000000000..581ab1aa9 --- /dev/null +++ 
b/changelogs/fragments/1347-update-docstring-zos_data_set.yml @@ -0,0 +1,3 @@ +trivial: + - zos_data_set - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1347). \ No newline at end of file diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 446fd6fe7..b500eb84a 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -736,20 +736,27 @@ def get_individual_data_set_parameters(params): """Builds a list of data set parameters to be used in future operations. - Arguments: - params {dict} -- The parameters from + Parameters + ---------- + params : dict + The parameters from Ansible's AnsibleModule object module.params. - Raises: - ValueError: Raised if top-level parameters "name" - and "batch" are both provided. - ValueError: Raised if neither top-level parameters "name" - or "batch" are provided. - - Returns: - [list] -- A list of dicts where each list item + Returns + ------- + Union[dict] + A list of dicts where each list item represents one data set. Each dictionary holds the parameters (passed to the zos_data_set module) for the data set which it represents. + + Raises + ------ + ValueError + Raised if top-level parameters "name" + and "batch" are both provided. + ValueError + Raised if neither top-level parameters "name" + or "batch" are provided. """ if params.get("name") and params.get("batch"): raise ValueError( @@ -769,7 +776,31 @@ def get_individual_data_set_parameters(params): # * can be replaced by built-in def data_set_name(contents, dependencies): """Validates provided data set name(s) are valid. - Returns a list containing the name(s) of data sets.""" + Returns a list containing the name(s) of data sets. + + Parameters + ---------- + contents : str + Name of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. 
+ + Returns + ------- + None + If the dependencies have a batch. + str + The data set name. + + Raises + ------ + ValueError + Data set name must be provided. + ValueError + Data set and member name must be provided. + ValueError + A value is invalid. + """ if dependencies.get("batch"): return None if contents is None: @@ -807,7 +838,25 @@ def data_set_name(contents, dependencies): # * dependent on state def space_type(contents, dependencies): """Validates provided data set unit of space is valid. - Returns the unit of space.""" + Returns the unit of space. + + Parameters + ---------- + contents : str + Unit of space of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + str + The data set unit of space. + + Raises + ------ + ValueError + Value provided is invalid. +""" if dependencies.get("state") == "absent": return "m" if contents is None: @@ -825,7 +874,27 @@ def space_type(contents, dependencies): # * dependent on state def sms_class(contents, dependencies): """Validates provided sms class is of valid length. - Returns the sms class.""" + Returns the sms class. + + Parameters + ---------- + contents : str + Name of the sms class. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + The sms class set name. + + Raises + ------ + ValueError + Value is invalid. + """ if dependencies.get("state") == "absent" or contents is None: return None if len(contents) < 1 or len(contents) > 8: @@ -840,7 +909,22 @@ def sms_class(contents, dependencies): def valid_when_state_present(contents, dependencies): """Ensures no arguments that are invalid when state!=present - are allowed.""" + are allowed. + + Parameters + ---------- + contents : str + Arguments to be validated. + dependencies : dict + Any dependencies needed for contents argument to be validated. 
+ + Returns + ------- + None + If the state is absent or contents is none. + str + Valid arguments. + """ if dependencies.get("state") == "absent" or contents is None: return None return contents @@ -850,7 +934,27 @@ def valid_when_state_present(contents, dependencies): # * dependent on format def record_length(contents, dependencies): """Validates provided record length is valid. - Returns the record length as integer.""" + Returns the record length as integer. + + Parameters + ---------- + contents : str + Length of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + The data set length. + + Raises + ------ + ValueError + Value is invalid. + """ if dependencies.get("state") == "absent": return None contents = ( @@ -872,7 +976,26 @@ def record_length(contents, dependencies): # * dependent on state # * dependent on record_length def record_format(contents, dependencies): - """Validates data set format is valid.""" + """Validates data set format is valid. + Returns uppercase data set format. + + Parameters + ---------- + contents : str + Format of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + str + The data set format in uppercase. Default is 'FB'. + + Raises + ------ + ValueError + Value is invalid. + """ if dependencies.get("state") == "absent": return "fb" if contents is None: @@ -880,7 +1003,7 @@ def record_format(contents, dependencies): formats = "|".join(DATA_SET_FORMATS) if not re.fullmatch(formats, contents, re.IGNORECASE): raise ValueError( - "Value {0} is invalid for format argument. format must be of of the following: {1}.".format( + "Value {0} is invalid for format argument. 
format must be one of the following: {1}.".format( contents, ", ".join(DATA_SET_FORMATS) ) ) @@ -889,8 +1012,27 @@ def record_format(contents, dependencies): # * dependent on state def data_set_type(contents, dependencies): - """Validates data set type is valid.""" - # if dependencies.get("state") == "absent" and contents != "member": + """Validates data set type is valid. + Returns uppercase data set type. + + Parameters + ---------- + contents : str + Type of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + str + The data set type in uppercase. Default is PDS. + + Raises + ------ + ValueError + Value is invalid. + """ + # if dependencies.get("state") == "absent" and contents != "MEMBER": # return None if contents is None: return "pds" @@ -907,7 +1049,29 @@ def data_set_type(contents, dependencies): # * dependent on state def volumes(contents, dependencies): """Validates volume is valid. - Returns uppercase volume.""" + Returns uppercase volume. + + Parameters + ---------- + contents : str + Name of the volume. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + The volume name. + + Raises + ------ + ValueError + Argument is invalid. + ValueError + Volume is required when state is cataloged. + """ if contents is None: if dependencies.get("state") == "cataloged": raise ValueError("Volume is required when state==cataloged.") @@ -931,7 +1095,31 @@ def volumes(contents, dependencies): # * dependent on type def key_length(contents, dependencies): """Validates data set key length is valid. - Returns data set key length as integer.""" + Returns data set key length as integer. + + Parameters + ---------- + contents : str + key_length. + dependencies : dict + Any dependencies needed for contents argument to be validated. 
+ + Returns + ------- + None + If the state is absent or contents is none. + int + key_length. + + Raises + ------ + ValueError + Argument is invalid. + ValueError + key_length was not provided when requesting KSDS data set. + ValueError + key_length can not be provided when type is not KSDS. + """ if dependencies.get("state") == "absent": return None if dependencies.get("type") == "ksds" and contents is None: @@ -953,7 +1141,31 @@ def key_length(contents, dependencies): # * dependent on key_length def key_offset(contents, dependencies): """Validates data set key offset is valid. - Returns data set key offset as integer.""" + Returns data set key offset as integer. + + Parameters + ---------- + contents : str + Key offset of the data set. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + int + Key offset of the data set. + + Raises + ------ + ValueError + Argument is invalid. + ValueError + key_offset was not provided when requesting KSDS data set. + ValueError + key_offset can not be provided when type is not KSDS. + """ if dependencies.get("state") == "absent": return None if dependencies.get("type") == "ksds" and contents is None: @@ -974,7 +1186,22 @@ def key_offset(contents, dependencies): def perform_data_set_operations(name, state, **extra_args): """Calls functions to perform desired operations on - one or more data sets. Returns boolean indicating if changes were made.""" + one or more data sets. Returns boolean indicating if changes were made. + + Parameters + ---------- + name : str + Name of the dataset. + state : str + State of the data sets. + **extra_args : dict + Properties of the data sets. + + Returns + ------- + bool + If changes were made. 
+ """ changed = False # passing in **extra_args forced me to modify the acceptable parameters # for multiple functions in data_set.py including ensure_present, replace @@ -995,6 +1222,18 @@ def perform_data_set_operations(name, state, **extra_args): def parse_and_validate_args(params): + """Parse and validate args. + + Parameters + ---------- + params : dict + Params to validated and parsed. + + Returns + ------- + dict + Parsed args. + """ arg_defs = dict( # Used for batch data set args @@ -1202,6 +1441,13 @@ def parse_and_validate_args(params): def run_module(): + """Runs the module. + + Raises + ------ + fail_json + Any exception during processing of data set params. + """ # TODO: add logic to handle aliases during parsing module_args = dict( From ae2495657f545f1d890390e17b6cd26e962c418b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:08:29 -0600 Subject: [PATCH 342/413] [Documentation][zos_encode] Add and standarize docstrings on modules/zos_encode.py (#1348) * Add and standarize docstrings on modules/zos_encode.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Updated docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1348-update-docstring-zos_encode.yml | 3 + plugins/modules/zos_encode.py | 88 ++++++++++++++++++- 2 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1348-update-docstring-zos_encode.yml diff --git a/changelogs/fragments/1348-update-docstring-zos_encode.yml b/changelogs/fragments/1348-update-docstring-zos_encode.yml new file mode 100644 index 000000000..de9c11c17 --- /dev/null +++ b/changelogs/fragments/1348-update-docstring-zos_encode.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1348). 
\ No newline at end of file diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 1adc08c01..243abb2d9 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -295,6 +295,25 @@ def check_pds_member(ds, mem): + """Check if a member exists in a PDS. + + Parameters + ---------- + ds : str + PDS data set name. + mem : str + Member name to check if is under PDS. + + Returns + ------- + bool + If it is a member of the data set. + + Raises + ------ + EncodeError + Can not find member in provided dataset. + """ check_rc = False if mem in datasets.list_members(ds): check_rc = True @@ -304,7 +323,25 @@ def check_pds_member(ds, mem): def check_mvs_dataset(ds): - """ To call data_set utils to check if the MVS data set exists or not """ + """To call data_set utils to check if the MVS data set exists or not. + + Parameters + ---------- + ds : str + Data set name. + + Returns + ------- + tuple(bool,str) + If the data set exists and it's type. + + Raises + ------ + EncodeError + If data set is not cataloged. + EncodeError + Unable to determine data set type. + """ check_rc = False ds_type = None if not data_set.DataSet.data_set_exists(ds): @@ -321,7 +358,23 @@ def check_mvs_dataset(ds): def check_file(file): - """ check file is a USS file or an MVS data set """ + """Check file is a USS file or an MVS data set. + + Parameters + ---------- + file : str + File to check. + + Returns + ------- + tuple(bool,bool,str) + If is USS file, MVS dataset, and the dataset type. + + Raises + ------ + EncodeError + The data set is not partitioned. + """ is_uss = False is_mvs = False ds_type = None @@ -347,6 +400,18 @@ def check_file(file): def verify_uss_path_exists(file): + """Verify if USS path exists. + + Parameters + ---------- + file : str + Path of the file. + + Raises + ------ + EncodeError + File does not exist in the directory. 
+ """ if not path.exists(file): mypath = "/" + file.split("/")[0] + "/*" ld = listdir(mypath) @@ -359,6 +424,13 @@ def verify_uss_path_exists(file): def run_module(): + """Runs the module. + + Raises + ------ + fail_json + Exception during execution. + """ module_args = dict( src=dict(type="str", required=True), dest=dict(type="str"), @@ -530,6 +602,18 @@ def run_module(): class EncodeError(Exception): def __init__(self, message): + """Error during encoding. + + Parameters + ---------- + message : str + Human readable string describing the exception. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = 'An error occurred during encoding: "{0}"'.format(message) super(EncodeError, self).__init__(self.msg) From 581fdb277d5add77c40807fb4695c7387ddb0e68 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:09:00 -0600 Subject: [PATCH 343/413] [Documentation][zos_fetch] Add and standarize docstrings on modules/zos_fetch.py (#1349) * Add and standarize docstrings on modules/zos_fetch.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Updated docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1349-update-docstring-zos_fetch.yml | 3 + plugins/modules/zos_fetch.py | 159 +++++++++++++++++- 2 files changed, 158 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1349-update-docstring-zos_fetch.yml diff --git a/changelogs/fragments/1349-update-docstring-zos_fetch.yml b/changelogs/fragments/1349-update-docstring-zos_fetch.yml new file mode 100644 index 000000000..a38504c36 --- /dev/null +++ b/changelogs/fragments/1349-update-docstring-zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1349). 
\ No newline at end of file diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index cc26b622b..fda237768 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -303,16 +303,50 @@ def __init__(self, module): self.module = module def _fail_json(self, **kwargs): - """ Wrapper for AnsibleModule.fail_json """ + """Wrapper for AnsibleModule.fail_json. + + Parameters + ---------- + **kwargs : dict + Arguments to pass to fail_json(). + """ self.module.fail_json(**kwargs) def _run_command(self, cmd, **kwargs): - """ Wrapper for AnsibleModule.run_command """ + """Wrapper for AnsibleModule.run_command. + + Parameters + ---------- + cmd : str + Command to run. + **kwargs : dict + Arguments to pass to run_command(). + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ return self.module.run_command(cmd, **kwargs) def _get_vsam_size(self, vsam): """Invoke IDCAMS LISTCAT command to get the record length and space used. Then estimate the space used by the VSAM data set. + + Parameters + ---------- + vsam : str + VSAM data set name. + + Returns + ------- + tuple(int,int,int) + Total size, max_recl and rec_total. + + Raises + ------ + fail_json + Unable to obtain data set information. """ space_pri = 0 total_size = 0 @@ -350,7 +384,27 @@ def _get_vsam_size(self, vsam): return total_size, max_recl, rec_total def _copy_vsam_to_temp_data_set(self, ds_name): - """ Copy VSAM data set to a temporary sequential data set """ + """Copy VSAM data set to a temporary sequential data set. + + Parameters + ---------- + ds_name : str + VSAM dataset name to be copied into a temp data set. + + Returns + ------- + str + Temporary dataset name. + + Raises + ------ + fail_json + OS error. + fail_json + cmd error while copying dataset. + fail_json + Failed to call IDCAMS. 
+ """ mvs_rc = 0 vsam_size, max_recl, rec_total = self._get_vsam_size(ds_name) # Default in case of max recl being 80 to avoid failures when fetching and empty vsam. @@ -442,6 +496,25 @@ def _copy_vsam_to_temp_data_set(self, ds_name): def _fetch_uss_file(self, src, is_binary, encoding=None): """Convert encoding of a USS file. Return a tuple of temporary file name containing converted data. + + Parameters + ---------- + src : str + Source of the file. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + File name with the converted data. + + Raises + ------ + fail_json + Any exception ocurred while converting encoding. """ file_path = None if (not is_binary) and encoding: @@ -471,6 +544,25 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): def _fetch_vsam(self, src, is_binary, encoding=None): """Copy the contents of a VSAM to a sequential data set. Afterwards, copy that data set to a USS file. + + Parameters + ---------- + src : str + Source of the file. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + USS File containing the encoded content of the input data set. + + Raises + ------ + fail_json + Unable to delete temporary dataset. """ temp_ds = self._copy_vsam_to_temp_data_set(src) file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding) @@ -487,6 +579,27 @@ def _fetch_pdse(self, src, is_binary, encoding=None): """Copy a partitioned data set to a USS directory. If the data set is not being fetched in binary mode, encoding for all members inside the data set will be converted. + + Parameters + ---------- + src : str + Source of the dataset. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + Directory path containing the files of the converted data set members. + + Raises + ------ + fail_json + Error copying partitioned dataset to USS. + fail_json + Error converting encoding of the member. 
""" dir_path = tempfile.mkdtemp() cmd = "cp -B \"//'{0}'\" {1}" @@ -531,7 +644,28 @@ def _fetch_pdse(self, src, is_binary, encoding=None): def _fetch_mvs_data(self, src, is_binary, encoding=None): """Copy a sequential data set or a partitioned data set member - to a USS file + to a USS file. + + Parameters + ---------- + src : str + Source of the dataset. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + USS File containing the encoded content of the input data set. + + Raises + ------ + fail_json + Unable to copy to USS. + fail_json + Error converting encoding of the dataset. """ fd, file_path = tempfile.mkstemp() os.close(fd) @@ -571,6 +705,23 @@ def _fetch_mvs_data(self, src, is_binary, encoding=None): def run_module(): + """Runs the module. + + Raises + ------ + fail_json + When parameter verification fails. + fail_json + When the source does not exist or is uncataloged. + fail_json + When it's unable to determine dataset type. + fail_json + While gathering dataset information. + fail_json + When the data set member was not found inside a dataset. + fail_json + When the file does not have appropriate read permissions. 
+ """ # ********************************************************** # # Module initialization # # ********************************************************** # From fdcbf5666c4890aecd9eec4cdeb85038b087fca8 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:09:27 -0600 Subject: [PATCH 344/413] [Documentation][zos_job_query] Add docstrings to modules/zos_job_query.py (#1353) * Add docstrings to modules/zos_job_query.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- .../1353-update-docstring-zos_job_query.yml | 3 ++ plugins/modules/zos_job_query.py | 44 ++++++++++++++++++- 2 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1353-update-docstring-zos_job_query.yml diff --git a/changelogs/fragments/1353-update-docstring-zos_job_query.yml b/changelogs/fragments/1353-update-docstring-zos_job_query.yml new file mode 100644 index 000000000..550be9107 --- /dev/null +++ b/changelogs/fragments/1353-update-docstring-zos_job_query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_query - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1353). \ No newline at end of file diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index aaa72d9ab..279a3955f 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -266,7 +266,15 @@ def run_module(): - + """Initialize the module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Any exception while getting job params. + """ module_args = dict( job_name=dict(type="str", required=False, default="*"), owner=dict(type="str", required=False), @@ -313,7 +321,27 @@ def run_module(): def query_jobs(job_name, job_id, owner): - + """Returns jobs that coincide with the given arguments. + + Parameters + ---------- + job_name : str + Name of the jobs. 
+ job_id : str + Id of the jobs. + owner : str + Owner of the jobs. + + Returns + ------- + Union[str] + List with the jobs. + + Raises + ------ + RuntimeError + No job with was found. + """ jobs = [] if job_id: jobs = job_status(job_id=job_id) @@ -327,6 +355,18 @@ def query_jobs(job_name, job_id, owner): def parsing_jobs(jobs_raw): + """Parse job into an understandable format. + + Parameters + ---------- + jobs_raw : dict + Raw jobs. + + Returns + ------- + dict + Parsed jobs. + """ jobs = [] ret_code = {} for job in jobs_raw: From a5d6c35d113bc142042a7f04151f9df949e5d315 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:11:37 -0600 Subject: [PATCH 345/413] [Documentation][zos_lineinfile] Add and standarize docstrings on modules/zos_lineinfile.py (#1355) * Add and standarize docstrings on modules/zos_lineinfile.py * Create changelog fragment * Modify google style to numpy --- .../1355-update-docstring-zos_lineinfile.yml | 3 + plugins/modules/zos_lineinfile.py | 137 ++++++++++++------ 2 files changed, 98 insertions(+), 42 deletions(-) create mode 100644 changelogs/fragments/1355-update-docstring-zos_lineinfile.yml diff --git a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml new file mode 100644 index 000000000..3840b2862 --- /dev/null +++ b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1355). 
\ No newline at end of file diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index a6576af12..43e85061b 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -295,33 +295,45 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs, force): - """Replace a line with the matching regex pattern - Insert a line before/after the matching pattern - Insert a line at BOF/EOF - - Arguments: - src: {str} -- The z/OS USS file or data set to modify. - line: {str} -- The line to insert/replace into the src. - regexp: {str} -- The regular expression to look for in every line of the src. - If regexp matches, ins_aft/ins_bef will be ignored. - ins_aft: {str} -- Insert the line after matching '*regex*' pattern or EOF. - choices: - - EOF - - '*regex*' - ins_bef: {str} -- Insert the line before matching '*regex*' pattern or BOF. - choices: - - BOF - - '*regex*' - encoding: {str} -- Encoding of the src. - first_match: {bool} -- Take the first matching regex pattern. - backrefs: {bool} -- Back reference - force: {bool} -- force for modify a member part of a task in execution - - Returns: - str -- Information in JSON format. keys: - cmd: {str} -- dsed shell command - found: {int} -- Number of matching regex pattern - changed: {bool} -- Indicates if the source was modified. + """Replace a line with the matching regex pattern. + Insert a line before/after the matching pattern. + Insert a line at BOF/EOF. + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + line : str + The line to insert/replace into the src. + regexp : str + The regular expression to look for in every line of the src. + If regexp matches, ins_aft/ins_bef will be ignored. + ins_aft : str + Insert the line after matching '*regex*' pattern or EOF. + choices: + - EOF + - '*regex*' + ins_bef : str + Insert the line before matching '*regex*' pattern or BOF. 
+ choices: + - BOF + - '*regex*' + encoding : str + Encoding of the src. + first_match : bool + Take the first matching regex pattern. + backrefs : bool + Back reference. + force : bool + force for modify a member part of a task in execution. + + Returns + ------- + str + Information in JSON format. keys: + cmd {str} -- dsed shell command + found {int} -- Number of matching regex pattern + changed {bool} -- Indicates if the source was modified. """ return datasets.lineinfile( src, @@ -339,26 +351,46 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs def absent(src, line, regexp, encoding, force): - """Delete lines with matching regex pattern - - Arguments: - src: {str} -- The z/OS USS file or data set to modify. - line: {str} -- The line to be deleted in the src. If line matches, - regexp will be ignored. - regexp: {str} -- The regular expression to look for in every line of the src. - encoding: {str} -- Encoding of the src. - force: {bool} -- force for modify a member part of a task in execution - - Returns: - str -- Information in JSON format. keys: - cmd: {str} -- dsed shell command - found: {int} -- Number of matching regex pattern - changed: {bool} -- Indicates if the source was modified. + """Delete lines with matching regex pattern. + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + line : str + The line to be deleted in the src. If line matches, + regexp will be ignored. + regexp : str + The regular expression to look for in every line of the src. + encoding : str + Encoding of the src. + force : bool + Force for modify a member part of a task in execution. + + Returns + ------- + str + Information in JSON format. keys: + cmd {str} -- dsed shell command + found {int} -- Number of matching regex pattern + changed {bool} -- Indicates if the source was modified. 
""" return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True, force=force) def quotedString(string): + """Add escape if string was quoted. + + Parameters + ---------- + string : str + Given string. + + Returns + ------- + str + The string with the quote marks replaced. + """ # add escape if string was quoted if not isinstance(string, str): return string @@ -366,6 +398,27 @@ def quotedString(string): def main(): + """Initialize the module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + regexp is required with backrefs=true. + fail_json + line is required with state=present. + fail_json + One of line or regexp is required with state=absent. + fail_json + Source does not exist. + fail_json + Data set type is NOT supported. + fail_json + Creating backup has failed. + fail_json + dsed return content is NOT in json format. + """ module_args = dict( src=dict( type='str', From 216baa5100fb35ff4f0ebdf11fed29b5c58c3b3b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:12:04 -0600 Subject: [PATCH 346/413] [Documentation][zos_script] Add and standarize docstrings on modules/zos_script.py (#1390) * Add and standarize docstrings on module-utils/zos_script.py * Add changelog fragment --- .../fragments/1390-update-docstring-zos_script.yml | 3 +++ plugins/modules/zos_script.py | 11 +++++++++++ 2 files changed, 14 insertions(+) create mode 100644 changelogs/fragments/1390-update-docstring-zos_script.yml diff --git a/changelogs/fragments/1390-update-docstring-zos_script.yml b/changelogs/fragments/1390-update-docstring-zos_script.yml new file mode 100644 index 000000000..792bf9698 --- /dev/null +++ b/changelogs/fragments/1390-update-docstring-zos_script.yml @@ -0,0 +1,3 @@ +trivial: + - zos_script - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1390). 
\ No newline at end of file diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index 0677d187d..e4f93ef21 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -229,6 +229,17 @@ def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + The given chdir does not exist on the system. + fail_json + The script terminated with an error. + """ module = AnsibleModule( argument_spec=dict( chdir=dict(type='str', required=False), From 54ea6baa1ddd00344b8c5c0b62e22f8f4a744857 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:12:21 -0600 Subject: [PATCH 347/413] [Documentation][zos_tso_command] Add and standarize docstrings on modules/zos_tso_command.py (#1391) * Add and standarize docstrings on module-utils/zos_tso_command.py * Add changelog fragment * Modified docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> --- .../1391-update-docstring-zos_tso_command.yml | 3 + plugins/modules/zos_tso_command.py | 65 +++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 changelogs/fragments/1391-update-docstring-zos_tso_command.yml diff --git a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml new file mode 100644 index 000000000..c435799d4 --- /dev/null +++ b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml @@ -0,0 +1,3 @@ +trivial: + - zos_tso_command - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1391). 
\ No newline at end of file diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 17e190fb2..2ac4a9d32 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -135,6 +135,23 @@ def run_tso_command(commands, module, max_rc): + """Run tso command. + + Parameters + ---------- + commands : str + Commands to run. + module : AnsibleModule + Ansible module to run the command with. + max_rc : int + Max return code. + + Returns + ------- + Union[dict] + The command result details. + + """ script = """/* REXX */ PARSE ARG cmd address tso @@ -152,6 +169,24 @@ def run_tso_command(commands, module, max_rc): def copy_rexx_and_run_commands(script, commands, module, max_rc): + """Copy rexx into a temporary file and run commands. + + Parameters + ---------- + script : str + Script to run the command. + commands : str + Commands to run. + module : AnsibleModule + Ansible module to run the command with. + max_rc : int + Max return code. + + Returns + ------- + Union[dict] + The command result details. + """ command_detail_json = [] delete_on_close = True tmp_file = NamedTemporaryFile(delete=delete_on_close) @@ -180,6 +215,25 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): def list_or_str_type(contents, dependencies): + """Checks if a variable contains a string or a list of strings and returns it as a list of strings. + + Parameters + ---------- + contents : str | list[str] + String or list of strings. + dependencies + Unused. + + Returns + ------- + str | Union[str] + The parameter given as a list of strings. + + Raises + ------ + ValueError + Invalid argument type. Expected "string or list of strings". + """ failed = False if isinstance(contents, list): for item in contents: @@ -200,6 +254,17 @@ def list_or_str_type(contents, dependencies): def run_module(): + """Initialize module. + + Raises + ------ + fail_json + ValueError on BetterArgParser. + fail_json + Some command(s) failed. 
+ fail_json + An unexpected error occurred. + """ module_args = dict( commands=dict(type="raw", required=True, aliases=["command"]), max_rc=dict(type="int", required=False, default=0), From ec630df94655042fb5ccb7049c252ebfbd9d746c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:12:53 -0600 Subject: [PATCH 348/413] [Documentation][zos_volume_init] Add and standarize docstrings on modules/zos_volume_init.py (#1392) * Add and standarize docstrings on module-utils/zos_tso_command.py * Add changelog fragment --- .../fragments/1392-update-docstring-zos_volume_init.yml | 3 +++ plugins/modules/zos_volume_init.py | 6 ++++++ 2 files changed, 9 insertions(+) create mode 100644 changelogs/fragments/1392-update-docstring-zos_volume_init.yml diff --git a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml new file mode 100644 index 000000000..4536f186c --- /dev/null +++ b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml @@ -0,0 +1,3 @@ +trivial: + - zos_volume_init - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1392). \ No newline at end of file diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py index 6dbc9f97e..0be4f2a8f 100644 --- a/plugins/modules/zos_volume_init.py +++ b/plugins/modules/zos_volume_init.py @@ -230,7 +230,13 @@ def run_module(): + """Initialize the module. + Raises + ------ + fail_json + 'Index' cannot be False for SMS managed volumes. 
+ """ module_args = dict( address=dict(type="str", required=True), verify_volid=dict(type="str", required=False), From 87218eabcc8a4b6ddc28f5026b403e8f933cf878 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:13:16 -0600 Subject: [PATCH 349/413] [Documentation][zos_apf] Add and standarize docstrings on modules/zos_apf.py (#1393) * Add and standarize docstrings on modules/zos_apf.py * Add changelog fragment * Modified docstring --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1393-update-docstring-zos_apf.yml | 3 ++ plugins/modules/zos_apf.py | 37 +++++++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 changelogs/fragments/1393-update-docstring-zos_apf.yml diff --git a/changelogs/fragments/1393-update-docstring-zos_apf.yml b/changelogs/fragments/1393-update-docstring-zos_apf.yml new file mode 100644 index 000000000..8a89b7aa0 --- /dev/null +++ b/changelogs/fragments/1393-update-docstring-zos_apf.yml @@ -0,0 +1,3 @@ +trivial: + - zos_apf - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1393). \ No newline at end of file diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 117801306..664b2e493 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -312,6 +312,30 @@ def backupOper(module, src, backup, tmphlq=None): + """Create a backup for a specified USS file or MVS data set. + + Parameters + ---------- + module : AnsibleModule + src : str + Source USS file or data set to backup. + backup : str + Name for the backup. + tmphlq : str + The name of the temporary high level qualifier to use. + + Returns + ------- + str + Backup name. + + Raises + ------ + fail_json + Data set type is NOT supported. + fail_json + Creating backup has failed. 
+ """ # analysis the file type ds_utils = data_set.DataSetUtils(src) file_type = ds_utils.ds_type() @@ -336,6 +360,19 @@ def backupOper(module, src, backup, tmphlq=None): def main(): + """Initialize the module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Marker length may not exceed 72 characters. + fail_json + library is required. + fail_json + An exception occurred. + """ module = AnsibleModule( argument_spec=dict( library=dict( From 7abaa3618b3cbf9842ec5de347771356e5790c74 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:13:47 -0600 Subject: [PATCH 350/413] [Documentation][zos_operator_action_query] Add and standarize docstrings on modules/zos_operator_action_query.py (#1394) * Add and standarize docstrings on modules/zos_operator_action_query.py * Add changelog fragment * Modified docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...te_docstring-zos_operator_action_query.yml | 3 + plugins/modules/zos_operator_action_query.py | 281 +++++++++++++++++- 2 files changed, 272 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml diff --git a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml new file mode 100644 index 000000000..25c34fd89 --- /dev/null +++ b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator_action_query - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1394). 
\ No newline at end of file diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 55cd7cd00..ba6e4ee77 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -238,6 +238,15 @@ def run_module(): + """Initialize module. + + Raises + ------ + fail_json + A non-zero return code was received while querying the operator. + fail_json + An unexpected error occurred. + """ module_args = dict( system=dict(type="str", required=False), message_id=dict(type="str", required=False), @@ -317,6 +326,18 @@ def run_module(): def parse_params(params): + """Parse parameters using BetterArgParser. + + Parameters + ---------- + params : dict + Parameters to parse. + + Returns + ------- + dict + Parsed parameters. + """ arg_defs = dict( system=dict(arg_type=system_type, required=False), message_id=dict(arg_type=message_id_type, required=False), @@ -329,24 +350,85 @@ def parse_params(params): def system_type(arg_val, params): + """System type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + arg_val validated in uppercase. + """ regex = "^(?:[a-zA-Z0-9]{1,8})|(?:[a-zA-Z0-9]{0,7}[*])$" validate_parameters_based_on_regex(arg_val, regex) return arg_val.upper() def message_id_type(arg_val, params): + """Message id type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + arg_val validated in uppercase. + """ regex = "^(?:[a-zA-Z0-9]{1,})|(?:[a-zA-Z0-9]{0,}[*])$" validate_parameters_based_on_regex(arg_val, regex) return arg_val.upper() def job_name_type(arg_val, params): + """Job name type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. 
+ + Returns + ------- + str + arg_val validated in uppercase. + """ regex = "^(?:[a-zA-Z0-9]{1,8})|(?:[a-zA-Z0-9]{0,7}[*])$" validate_parameters_based_on_regex(arg_val, regex) return arg_val.upper() def message_filter_type(arg_val, params): + """Message filter type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + regex of the given argument. + + Raises + ------ + ValidationError + An error occurred during validate the input parameters. + """ try: filter_text = arg_val.get("filter") use_regex = arg_val.get("use_regex") @@ -364,6 +446,25 @@ def message_filter_type(arg_val, params): def validate_parameters_based_on_regex(value, regex): + """Validate parameters based on regex. + + Parameters + ---------- + value : str + Argument to compare to regex pattern. + regex : str + Regex to get pattern from. + + Returns + ------- + str + The value given. + + Raises + ------ + ValidationError + An error occurred during validate the input parameters. + """ pattern = re.compile(regex) if pattern.fullmatch(value): pass @@ -373,7 +474,20 @@ def validate_parameters_based_on_regex(value, regex): def find_required_request(merged_list, params): - """Find the request given the options provided.""" + """Find the request given the options provided. + + Parameters + ---------- + merged_list : list + Merged list to search. + params : dict + Parameters to get for the function. + + Returns + ------- + Union + Filtered list. + """ requests = filter_requests(merged_list, params) return requests @@ -381,9 +495,24 @@ def find_required_request(merged_list, params): def create_merge_list(message_a, message_b, message_filter): """Merge the return lists that execute both 'd r,a,s' and 'd r,a,jn'. 
For example, if we have: - 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO'OR 'CANCEL'" + 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO' OR 'CANCEL'" 'd r,a,jn' response like:"742 R FVFNT29H &742 ARC0055A REPLY 'GO' OR 'CANCEL'" - the results will be merged so that a full list of information returned on condition""" + the results will be merged so that a full list of information returned on condition. + + Parameters + ---------- + message_a : str + Result coming from command 'd r,a,s'. + message_b : str + Result coming from command 'd r,a,jn'. + message_filter : str + Message filter. + + Returns + ------- + Union + Merge of the result of message_a and the result of message_b. + """ list_a = parse_result_a(message_a, message_filter) list_b = parse_result_b(message_b, message_filter) merged_list = merge_list(list_a, list_b) @@ -391,7 +520,20 @@ def create_merge_list(message_a, message_b, message_filter): def filter_requests(merged_list, params): - """filter the request given the params provided.""" + """Filter the request given the params provided. + + Parameters + ---------- + merged_list : list + Merged list to filter. + params : dict + Parameters to get for the function. + + Returns + ------- + Union + Filtered list. + """ system = params.get("system") message_id = params.get("message_id") job_name = params.get("job_name") @@ -406,6 +548,22 @@ def filter_requests(merged_list, params): def handle_conditions(merged_list, condition_type, value): + """Handle conditions. + + Parameters + ---------- + merged_list : list[dict] + List to check. + condition_type : str + Condition type to check. + value + Value to check for. + + Returns + ------- + Union[dict] + The new list. + """ # regex = re.compile(condition_values) newlist = [] exist = False @@ -422,6 +580,24 @@ def handle_conditions(merged_list, condition_type, value): def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + """Execute operator command. 
+ + Parameters + ---------- + operator_cmd : str + Operator command. + timeout_s : int + Timeout to wait for the command execution, measured in centiseconds. + *args : dict + Arguments for the command. + **kwargs : dict + More arguments for the command. + + Returns + ------- + OperatorQueryResult + The result of the command. + """ # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) @@ -433,6 +609,20 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): def match_raw_message(msg, message_filter): + """Match raw message. + + Parameters + ---------- + msg : str + Message to match. + message_filter : str + Filter for the message. + + Return + ------ + bool + If the pattern matches msg. + """ pattern = re.compile(message_filter, re.DOTALL) return pattern.match(msg) @@ -442,7 +632,20 @@ def parse_result_a(result, message_filter): there are usually two formats: - line with job_id: 810 R MV2D JOB58389 &810 ARC0055A REPLY 'GO' OR 'CANCEL' - line without job_id: 574 R MV28 *574 IXG312E OFFLOAD DELAYED FOR.. - also the request contains multiple lines, we need to handle that as well""" + also the request contains multiple lines, we need to handle that as well. + + Parameters + ---------- + result : str + Result coming from command 'd r,a,s'. + message_filter : str + Message filter. + + Returns + ------- + Union[dict[str,str]] + Resulting list. + """ dict_temp = {} list = [] @@ -474,7 +677,20 @@ def parse_result_a(result, message_filter): def parse_result_b(result, message_filter): """Parse the result that comes from command 'd r,a,jn', the main purpose to use this command is to get the job_name and message id, which is not - included in 'd r,a,s'""" + included in 'd r,a,s' + + Parameters + ---------- + result : str + Result coming from command 'd r,a,jn'. + message_filter : str + Message filter. 
+ + Returns + ------- + Union[dict[str,str]] + Resulting list. + """ dict_temp = {} list = [] @@ -506,6 +722,20 @@ def parse_result_b(result, message_filter): def merge_list(list_a, list_b): + """Merge lists. + + Parameters + ---------- + list_a : list + First list to be merged. + list_b : list + Second list to be merged. + + Returns + ------- + Union + Merged of list_a and list_b. + """ merged_list = [] for dict_a in list_a: for dict_b in list_b: @@ -522,6 +752,18 @@ class Error(Exception): class ValidationError(Error): def __init__(self, message): + """An error occurred during validate the input parameters. + + Parameters + ---------- + message : str + Message of the error that ocurred. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = ( 'An error occurred during validate the input parameters: "{0}"'.format( message @@ -538,12 +780,27 @@ def __init__( ): """Response object class to manage the result from executing a command to query for actionable messages. Class will also generate a message - by concatenating stdout and stderr - - Arguments: - rc {str} -- The return code - stdout {str} -- The standard out of the command run - stderr {str} -- The standard error of the command run + by concatenating stdout and stderr. + + Parameters + ---------- + rc : str + The return code. + stdout : str + The standard out of the command run. + stderr : str + The standard error of the command run. + + Attributes + ---------- + rc : str + The return code. + stdout : str + The standard out of the command run. + stderr : str + The standard error of the command run. + message : str + The standard out of the command run. 
""" self.rc = rc self.stdout = stdout From 3d38011f67f1cf41e5a519f2b18bc4b412d8e911 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 16 Apr 2024 10:52:48 -0600 Subject: [PATCH 351/413] [Bugfix][1239][zos job submit]max_rc_more_than_0_doesn_not_put_change_as_true (#1345) * First iteration of solution * Change dataset * Ensure all cases for false * Remove print * Change behavior for bugfix * Add fragment * Fix latest lower case * Fix uppercase * Remove typo * Remove typo * Fix redundance * Fix test and upper cases * Fix test case * Fix fragment * Return to lower case * Return to lower case --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml | 5 +++++ plugins/modules/zos_job_submit.py | 4 +++- tests/functional/modules/test_zos_fetch_func.py | 5 +++-- tests/functional/modules/test_zos_job_submit_func.py | 6 +++--- 4 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml diff --git a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml new file mode 100644 index 000000000..a09b8fa64 --- /dev/null +++ b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_job_submit - when the argument max_rc was different than 0 the changed response returned + as false. Fix now return a changed response as true when the rc is not 0 and max_rc is above + or equal to the value of the job. + (https://github.com/ansible-collections/ibm_zos_core/pull/1345). 
\ No newline at end of file diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1b56f459d..204c79217 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1108,7 +1108,9 @@ def assert_valid_return_code(max_rc, job_rc, ret_code, result): # should NOT be 'changed=true' even though the user did override the return code, # a non-zero return code means the job did not change anything, so set it as # result["chagned"]=False, - if job_rc != 0: + if max_rc and job_rc > max_rc: + return False + elif job_rc != 0 and max_rc is None: return False return True diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 5b8e7f878..4d72a6cc5 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -539,12 +539,12 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) - dest_path = "/tmp/" + ds_name + dest_path = "/tmp/" + TEST_PS with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) local_checksum = checksum(dest_path, hash_func=sha256) - params = dict(src=ds_name, dest="/tmp/", flat=True) + params = dict(src=TEST_PS, dest="/tmp/", flat=True) try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -562,6 +562,7 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) pds_name = get_tmp_ds_name() dest_path = "/tmp/" + pds_name full_path = dest_path + "/MYDATA" + pds_name_mem = pds_name + "(MYDATA)" hosts.all.zos_data_set( name=pds_name, type="pds", diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index f2f1582fa..34fb39d4b 100644 --- 
a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -713,11 +713,11 @@ def test_job_submit_max_rc(ansible_zos_module, args): assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) elif args["max_rc"] == 12: - # Will not fail but changed will be false for the non-zero RC, there - # are other possibilities like an ABEND or JCL ERROR will fail this even + # Will not fail and as the max_rc is set to 12 and the rc is 8 is a change true + # there are other possibilities like an ABEND or JCL ERROR will fail this even # with a MAX RC assert result.get("msg") is None - assert result.get('changed') is False + assert result.get('changed') is True assert result.get("jobs")[0].get("ret_code").get("code") < 12 finally: hosts.all.file(path=tmp_file.name, state="absent") From b198d02c57b7b2a58f74893c2d56118bb0188f28 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 18 Apr 2024 12:01:13 -0600 Subject: [PATCH 352/413] [Bug][zos_find] Filter allocated space when using size filter (#1443) * Update zos_archive choices * Update zos_backup_restore choices * Update zos_copy choices * Update zos_data_set choices * Update module docs * Update zos_job_submit choices * Update zos_mount choices * Update zos_unarchive choices * Fix zos_archive and update its tests This also includes major work on zos_data_set since half of the test suite for zos_archive depends on creating data sets. 
* Update zos_backup_restore tests * Update zos_blockinfile tests * Update more modules * Updated more tests * Update zos_unarchive and zos_mount * Update zos_backup_restore unit tests * Corrected size value to use allocated size instead of utilized size * Added size fix * Updated test * Corrected test * Updated docs * Updated changelog * Added test --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- changelogs/fragments/1443-zos_find-filter-size.yml | 4 ++++ plugins/modules/zos_find.py | 3 ++- tests/functional/modules/test_zos_find_func.py | 14 +++++++++----- 3 files changed, 15 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1443-zos_find-filter-size.yml diff --git a/changelogs/fragments/1443-zos_find-filter-size.yml b/changelogs/fragments/1443-zos_find-filter-size.yml new file mode 100644 index 000000000..a5a8ce029 --- /dev/null +++ b/changelogs/fragments/1443-zos_find-filter-size.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size + for PDS/Es. + (https://github.com/ansible-collections/ibm_zos_core/pull/1443). 
\ No newline at end of file diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index b49d65f04..a12241458 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -31,6 +31,7 @@ author: - "Asif Mahmud (@asifmahmud)" - "Demetrios Dimatos (@ddimatos)" + - "Fernando Flores (@fernandofloresg)" options: age: description: @@ -479,7 +480,7 @@ def data_set_attribute_filter( age and not size and _age_filter(ds_age, now, age) ) or ( - size and not age and _size_filter(int(out[5]), size) + size and not age and _size_filter(int(out[6]), size) ) ): filtered_data_sets.add(ds) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 37a67ddbc..42a8db23e 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -15,6 +15,7 @@ __metaclass__ = type from ibm_zos_core.tests.helpers.volumes import Volume_Handler +import pytest SEQ_NAMES = [ "TEST.FIND.SEQ.FUNCTEST.FIRST", @@ -32,6 +33,8 @@ "TEST.FIND.VSAM.FUNCTEST.FIRST" ] +DATASET_TYPES = ['seq', 'pds', 'pdse'] + def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"): hosts = ansible_zos_module @@ -118,7 +121,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): search_string = "hello" try: hosts.all.zos_data_set( - batch=[dict(name=i, type='pds') for i in PDS_NAMES] + batch=[dict(name=i, type='pds', space_primary=1, space_type="m") for i in PDS_NAMES] ) hosts.all.zos_data_set( batch=[ @@ -216,13 +219,14 @@ def test_find_data_sets_older_than_age(ansible_zos_module): assert val.get('matched') == 2 -def test_find_data_sets_larger_than_size(ansible_zos_module): +@pytest.mark.parametrize("ds_type", DATASET_TYPES) +def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type): hosts = ansible_zos_module TEST_PS1 = 'TEST.PS.ONE' TEST_PS2 = 'TEST.PS.TWO' try: - res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_type="m", 
space_primary=5) - res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_type="m", space_primary=5) + res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_primary="1", space_type="m", type=ds_type) + res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_primary="1", space_type="m", type=ds_type) find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 @@ -236,7 +240,7 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module TEST_PS = 'USER.FIND.TEST' try: - hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="k", space_primary=1) + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_primary="1", space_type="k") find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 From 9b6b051097836a9d00a73377130f4d5af3f24e34 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:03:17 -0600 Subject: [PATCH 353/413] [Documentation][zos_find] Add and standarize docstrings on modules/zos_find.py (#1350) * Add and standarize docstrings on modules/zos_find.py * Create changelog fragment * Modify google style to numpy * Updated docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1350-update-docstring-zos_find.yml | 3 + plugins/modules/zos_find.py | 390 +++++++++++++----- 2 files changed, 297 insertions(+), 96 deletions(-) create mode 100644 changelogs/fragments/1350-update-docstring-zos_find.yml diff --git a/changelogs/fragments/1350-update-docstring-zos_find.yml b/changelogs/fragments/1350-update-docstring-zos_find.yml new file mode 100644 index 000000000..48c1fbce1 --- /dev/null +++ b/changelogs/fragments/1350-update-docstring-zos_find.yml @@ -0,0 +1,3 @@ +trivial: + - zos_find - Updated docstrings to numpy style 
for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1350). \ No newline at end of file diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index a12241458..b269c472d 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -277,18 +277,28 @@ def content_filter(module, patterns, content): """ Find data sets that match any pattern in a list of patterns and - contains the given content - - Arguments: - module {AnsibleModule} -- The Ansible module object being used in the module - patterns {list[str]} -- A list of data set patterns - content {str} -- The content string to search for within matched data sets - - Returns: - dict[ps=set, pds=dict[str, str], searched=int] -- A dictionary containing + contains the given content. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used in the module. + patterns : list[str] + A list of data set patterns. + content : str + The content string to search for within matched data sets. + + Returns + ------- + dict[ps=set, pds=dict[str, str], searched=int] + A dictionary containing a set of matched "PS" data sets, a dictionary containing "PDS" data sets and members corresponding to each PDS, an int representing number of total data sets examined. + + Raises + ------ + fail_json: Non-zero return code received while executing ZOAU shell command 'dgrep'. """ filtered_data_sets = dict(ps=set(), pds=dict(), searched=0) for pattern in patterns: @@ -321,15 +331,25 @@ def content_filter(module, patterns, content): def data_set_filter(module, pds_paths, patterns): """ Find data sets that match any pattern in a list of patterns. 
- Arguments: - module {AnsibleModule} -- The Ansible module object being used - patterns {list[str]} -- A list of data set patterns - - Returns: - dict[ps=set, pds=dict[str, str], searched=int] -- A dictionary containing + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + patterns : list[str] + A list of data set patterns. + + Returns + ------- + dict[ps=set, pds=dict[str, str], searched=int] + A dictionary containing a set of matched "PS" data sets, a dictionary containing "PDS" data sets and members corresponding to each PDS, an int representing number of total data sets examined. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'dls'. """ filtered_data_sets = dict(ps=set(), pds=dict(), searched=0) patterns = pds_paths or patterns @@ -372,15 +392,21 @@ def pds_filter(module, pds_dict, member_patterns, excludes=None): """ Return all PDS/PDSE data sets whose members match any of the patterns in the given list of member patterns. - Arguments: - module {AnsibleModule} -- The Ansible module object being used in the module - pds_dict {dict[str, str]} -- A dictionary where each key is the name of - of the PDS/PDSE and the value is a list of - members belonging to the PDS/PDSE - member_patterns {list} -- A list of member patterns to search for - - Returns: - dict[str, set[str]] -- Filtered PDS/PDSE with corresponding members + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used in the module. + pds_dict : dict[str, str] + A dictionary where each key is the name of + of the PDS/PDSE and the value is a list of + members belonging to the PDS/PDSE. + member_patterns : list + A list of member patterns to search for. + + Returns + ------- + dict[str, set[str]] + Filtered PDS/PDSE with corresponding members. 
""" filtered_pds = dict() for pds, members in pds_dict.items(): @@ -412,12 +438,22 @@ def vsam_filter(module, patterns, resource_type, age=None): """ Return all VSAM data sets that match any of the patterns in the given list of patterns. - Arguments: - module {AnsibleModule} -- The Ansible module object being used - patterns {list[str]} -- A list of data set patterns - - Returns: - set[str]-- Matched VSAM data sets + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + patterns : list[str] + A list of data set patterns. + + Returns + ------- + set[str] + Matched VSAM data sets. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'vls'. """ filtered_data_sets = set() now = time.time() @@ -447,14 +483,26 @@ def data_set_attribute_filter( ): """ Filter data sets based on attributes such as age or size. - Arguments: - module {AnsibleModule} -- The Ansible module object being used - data_sets {set[str]} -- A set of data set names - size {int} -- The size, in bytes, that should be used to filter data sets - age {int} -- The age, in days, that should be used to filter data sets - - Returns: - set[str] -- Matched data sets filtered by age and size + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + data_sets : set[str] + A set of data set names. + size : int + The size, in bytes, that should be used to filter data sets. + age : int + The age, in days, that should be used to filter data sets. + + Returns + ------- + set[str] + Matched data sets filtered by age and size. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'dls'. """ filtered_data_sets = set() now = time.time() @@ -494,13 +542,24 @@ def volume_filter(module, data_sets, volumes): """Return only the data sets that are allocated in one of the volumes from the list of input volumes. 
- Arguments: - module {AnsibleModule} -- The Ansible module object - data_sets {set[str]} -- A set of data sets to be filtered - volumes {list[str]} -- A list of input volumes - - Returns: - set[str] -- The filtered data sets + Parameters + ---------- + module : AnsibleModule + The Ansible module object. + data_sets : set[str] + A set of data sets to be filtered. + volumes : list[str] + A list of input volumes. + + Returns + ------- + set[str] + The filtered data sets. + + Raises + ------ + fail_json + Unable to retrieve VTOC information. """ filtered_data_sets = set() for volume in volumes: @@ -518,15 +577,21 @@ def volume_filter(module, data_sets, volumes): def exclude_data_sets(module, data_set_list, excludes): - """Remove data sets that match any pattern in a list of patterns - - Arguments: - module {AnsibleModule} -- The Ansible module object being used - data_set_list {set[str]} -- A set of data sets to be filtered - excludes {list[str]} -- A list of data set patterns to be excluded - - Returns: - set[str] -- The remaining data sets that have not been excluded + """Remove data sets that match any pattern in a list of patterns. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + data_set_list : set[str] + A set of data sets to be filtered. + excludes : list[str] + A list of data set patterns to be excluded. + + Returns + ------- + set[str] + The remaining data sets that have not been excluded. """ for ds in set(data_set_list): for ex_pat in excludes: @@ -537,15 +602,21 @@ def exclude_data_sets(module, data_set_list, excludes): def _age_filter(ds_date, now, age): - """Determine whether a given date is older than 'age' - - Arguments: - ds_date {str} -- The input date in the format YYYY/MM/DD - now {float} -- The time elapsed since the last epoch - age {int} -- The age, in days, to compare against - - Returns: - bool -- Whether 'ds_date' is older than 'age' + """Determine whether a given date is older than 'age'. 
+ + Parameters + ---------- + ds_date : str + The input date in the format YYYY/MM/DD. + now : float + The time elapsed since the last epoch. + age : int + The age, in days, to compare against. + + Returns + ------- + bool + Whether 'ds_date' is older than 'age'. """ year, month, day = list(map(int, ds_date.split("/"))) if year == "0000": @@ -561,14 +632,24 @@ def _age_filter(ds_date, now, age): def _get_creation_date(module, ds): - """Retrieve the creation date for a given data set - - Arguments: - module {AnsibleModule} -- The Ansible module object being used - ds {str} -- The name of the data set - - Returns: - str -- The data set creation date in the format "YYYY/MM/DD" + """Retrieve the creation date for a given data set. + + Arguments + --------- + module : AnsibleModule + The Ansible module object being used. + ds : str + The name of the data set. + + Returns + ------- + str + The data set creation date in the format "YYYY/MM/DD". + + Raises + ------ + fail_json + Non-zero return code received while retrieving data set age. """ rc, out, err = mvs_cmd.idcams( " LISTCAT ENT('{0}') HISTORY".format(ds), authorized=True @@ -596,14 +677,19 @@ def _get_creation_date(module, ds): def _size_filter(ds_size, size): - """ Determine whether a given size is greater than the input size - - Arguments: - ds_size {int} -- The input size, in bytes - size {int} -- The size, in bytes, to compare against - - Returns: - bool -- Whether 'ds_size' is greater than 'age' + """Determine whether a given size is greater than the input size. + + Parameters + ---------- + ds_size : int + The input size, in bytes. + size : int + The size, in bytes, to compare against. + + Returns + ------- + bool + Whether 'ds_size' is greater than 'age'. 
""" if size >= 0 and ds_size >= abs(size): return True @@ -613,15 +699,26 @@ def _size_filter(ds_size, size): def _match_regex(module, pattern, string): - """ Determine whether the input regex pattern matches the string - - Arguments: - module {AnsibleModule} -- The Ansible module object being used - pattern {str} -- The regular expression to match - string {str} -- The string to match - - Returns: - re.Match -- A Match object that matches the pattern to string + """Determine whether the input regex pattern matches the string. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + pattern : str + The regular expression to match. + string : str + The string to match. + + Returns + ------- + re.Match + A Match object that matches the pattern to string. + + Raises + ------ + fail_json + Invalid regular expression. """ try: return fullmatch(pattern, string, re.IGNORECASE) @@ -640,7 +737,28 @@ def _dgrep_wrapper( verbose=False, context=None ): - """A wrapper for ZOAU 'dgrep' shell command""" + """A wrapper for ZOAU 'dgrep' shell command. + + Parameters + ---------- + data_set_pattern : str + Data set pattern where to search for content. + content : str + Content to search across the data sets specified in data_set_pattern. + ignore_case : bool + Whether to ignore case or not. + line_num : bool + Whether to display line numbers. + verbose : bool + Extra verbosity, prints names of datasets being searched. + context : int + If context lines are requested, then up to <NUM> lines before and after the matching line are also printed. + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ dgrep_cmd = "dgrep" if ignore_case: dgrep_cmd += " -i" @@ -663,7 +781,28 @@ def _dls_wrapper( verbose=False, migrated=False ): - """A wrapper for ZOAU 'dls' shell command""" + """A wrapper for ZOAU 'dls' shell command. + + Parameters + ---------- + data_set_pattern : str + Data set pattern. 
+ list_details : bool + Display detailed information based on the dataset type. + u_time : bool + Display last usage time. + size : bool + Display size in list. + verbose : bool + Display verbose information. + migrated : bool + Display migrated data sets. + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ dls_cmd = "dls" if migrated: dls_cmd += " -m" @@ -682,7 +821,22 @@ def _dls_wrapper( def _vls_wrapper(pattern, details=False, verbose=False): - """A wrapper for ZOAU 'vls' shell command""" + """A wrapper for ZOAU 'vls' shell command. + + Parameters + ---------- + pattern : str + Data set pattern. + details : bool + Display detailed information based on the dataset type. + verbose : bool + Display verbose information. + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ vls_cmd = "vls" if details: vls_cmd += " -l" @@ -694,6 +848,20 @@ def _vls_wrapper(pattern, details=False, verbose=False): def _match_resource_type(type1, type2): + """Compare that the two types match. + + Parameters + ---------- + type1 : str + One of the types that are expected to match. + type2 : str + One of the types that are expected to match. + + Returns + ------- + bool + If the types match. + """ if type1 == type2: return True if type1 == "CLUSTER" and type2 not in ("DATA", "INDEX"): @@ -702,13 +870,17 @@ def _match_resource_type(type1, type2): def _ds_type(ds_name): - """Utility function to determine the DSORG of a data set + """Utility function to determine the DSORG of a data set. - Arguments: - ds_name {str} -- The name of the data set + Parameters + ---------- + ds_name : str + The name of the data set. - Returns: - str -- The DSORG of the data set + Returns + ------- + str + The DSORG of the data set. """ rc, out, err = mvs_cmd.ikjeft01( " LISTDS '{0}'".format(ds_name), @@ -721,6 +893,25 @@ def _ds_type(ds_name): def run_module(module): + """Initialize parameters. 
+ + Parameters + ---------- + module : AnsibleModule + Ansible Module. + + Returns + ------- + dict + Arguments. + + Raises + ------ + fail_json + Failed to process age. + fail_json + Failed to process size. + """ # Parameter initialization age = module.params.get('age') age_stamp = module.params.get('age_stamp') @@ -817,6 +1008,13 @@ def run_module(module): def main(): + """Initialize module when it's run as main. + + Raises + ------ + fail_json + Parameter verification failed. + """ module = AnsibleModule( argument_spec=dict( age=dict(type="str", required=False), From 9acac9f935b0133b5c4d46d45d78118bbc58e994 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:04:29 -0600 Subject: [PATCH 354/413] [Documentation][zos_gather_facts] Add and standarize docstrings on modules/zos_gather_facts.py (#1351) * Add and standarize docstrings on modules/zos_gather_facts.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...1351-update-docstring-zos_gather_facts.yml | 3 + plugins/modules/zos_gather_facts.py | 67 ++++++++++++++----- 2 files changed, 53 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1351-update-docstring-zos_gather_facts.yml diff --git a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml new file mode 100644 index 000000000..31fe8dfda --- /dev/null +++ b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml @@ -0,0 +1,3 @@ +trivial: + - zos_gather_facts - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1351). 
\ No newline at end of file diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index 2ea7b0baf..a9df42a49 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -128,11 +128,18 @@ def zinfo_facts_list_builder(gather_subset): """Builds a list of strings to pass into 'zinfo' based off the gather_subset list. - Arguments: - gather_subset {list} -- A list of subsets to pass in. - Returns: - [list[str]] -- A list of strings that contains sanitized subsets. - [None] -- An invalid value was received for the subsets. + + Parameters + ---------- + gather_subset : list + A list of subsets to pass in. + + Returns + ------- + Union[str] + A list of strings that contains sanitized subsets. + None + An invalid value was received for the subsets. """ if gather_subset is None or 'all' in gather_subset: return ["all"] @@ -157,11 +164,17 @@ def zinfo_facts_list_builder(gather_subset): def flatten_zinfo_json(zinfo_dict): """Removes one layer of mapping in the dictionary. Top-level keys correspond to zinfo subsets and are removed. - Arguments: - zinfo_dict {dict} -- A dictionary that contains the parsed result from - the zinfo json string. - Returns: - [dict] -- A flattened dictionary. + + Parameters + ---------- + zinfo_dict : dict + A dictionary that contains the parsed result from + the zinfo json string. + + Returns + ------- + dict + A flattened dictionary. """ d = {} for subset in list(zinfo_dict): @@ -172,13 +185,20 @@ def flatten_zinfo_json(zinfo_dict): def apply_filter(zinfo_dict, filter_list): """Returns a dictionary that contains only the keys which fit the specified filters. - Arguments: - zinfo_dict {dict} -- A flattened dictionary that contains results from - zinfo. - filter_list {list} -- A string list of shell wildcard patterns (i.e. - 'filters') to apply to the zinfo_dict keys. - Returns: - [dict] -- A dictionary with keys that are filtered out. 
+ + Parameters + ---------- + zinfo_dict : dict + A flattened dictionary that contains results from + zinfo. + filter_list : list + A string list of shell wildcard patterns (i.e. + 'filters') to apply to the zinfo_dict keys. + + Returns + ------- + dict + A dictionary with keys that are filtered out. """ if filter_list is None or filter_list == [] or '*' in filter_list: @@ -193,6 +213,19 @@ def apply_filter(zinfo_dict, filter_list): def run_module(): + """Initialize module. + + Raises + ------ + fail_json + The zos_gather_facts module requires ZOAU >= 1.3.0. + fail_json + An invalid subset was passed to Ansible. + fail_json + An invalid subset was detected. + fail_json + An exception has occurred. Unable to gather facts. + """ # define available arguments/parameters a user can pass to the module module_args = dict( gather_subset=dict( From 3c9eae6592d6cc5cd7e0769e0ac79627a98cd5e4 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:05:01 -0600 Subject: [PATCH 355/413] [Documentation][zos_job_output] Add and standarize docstrings on modules/zos_job_output.py (#1352) * Add and standarize docstrings on modules/zos_job_output.py * Create changelog fragment * Modify google style to numpy --- .../1352-update-docstring-zos_job_output.yml | 3 +++ plugins/modules/zos_job_output.py | 13 +++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 changelogs/fragments/1352-update-docstring-zos_job_output.yml diff --git a/changelogs/fragments/1352-update-docstring-zos_job_output.yml b/changelogs/fragments/1352-update-docstring-zos_job_output.yml new file mode 100644 index 000000000..78aac0cac --- /dev/null +++ b/changelogs/fragments/1352-update-docstring-zos_job_output.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_output - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1352). 
\ No newline at end of file diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index ed5a182d3..6a6328e67 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -431,6 +431,19 @@ def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + job_id or job_name or owner not provided. + fail_json + ZOAU exception. + fail_json + Any exception while fetching jobs. + """ module_args = dict( job_id=dict(type="str", required=False), job_name=dict(type="str", required=False), From f0b5d62855faf2b0531128a130f7bb088d75027b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:06:50 -0600 Subject: [PATCH 356/413] [Documentation][zos_mount] Add and standarize docstrings on modules/zos_mount.py (#1356) * Add and standarize docstrings on modules/zos_mount.py * Create changelog fragment * Modify google style to numpy * Updated docstring --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1356-update-docstring-zos_mount.yml | 3 + plugins/modules/zos_mount.py | 81 +++++++++++++++++-- 2 files changed, 79 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/1356-update-docstring-zos_mount.yml diff --git a/changelogs/fragments/1356-update-docstring-zos_mount.yml b/changelogs/fragments/1356-update-docstring-zos_mount.yml new file mode 100644 index 000000000..a2c09caa5 --- /dev/null +++ b/changelogs/fragments/1356-update-docstring-zos_mount.yml @@ -0,0 +1,3 @@ +trivial: + - zos_mount - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1356). 
\ No newline at end of file diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 61ca20b9f..8828d9005 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -564,6 +564,31 @@ def mt_backupOper(module, src, backup, tmphlq=None): + """Makes a backup of the source. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule. + src : str + Source USS file or MVS data set. + backup : str + Name for the backup. + tmphlq : str + HLQ to be used for backup dataset. + + Returns + ------- + str + Backup name. + + Raises + ------ + fail_json + Crating backup has failed. + fail_json + Data set type is NOT supported. + """ # analysis the file type ds_utils = data_set.DataSetUtils(src) file_type = ds_utils.ds_type() @@ -590,11 +615,24 @@ def mt_backupOper(module, src, backup, tmphlq=None): def swap_text(original, adding, removing): - """ - swap_text returns original after removing blocks matching removing, - and adding the adding param - original now should be a list of lines without newlines - return is the consolidated file value + """swap_text returns original after removing blocks matching removing, + and adding the adding param. + original now should be a list of lines without newlines. + return is the consolidated file value. + + Parameters + ---------- + original : str + Text to modify. + adding : str + Lines to add. + removing : str + Lines to delete if matched. + + Returns + ------- + str + The consolidated file value. """ content_lines = original @@ -658,6 +696,37 @@ def swap_text(original, adding, removing): def run_module(module, arg_def): + """Initialize module. + + Parameters + ---------- + arg_def : dict + Arguments to use. + + Returns + ------- + dict + Arguments. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Mount source either is not cataloged or does not exist. + fail_json + Exception encountered during directory creation. + fail_json + Mount destination doesn't exist. 
+ fail_json + Checking filesystem list failed with error. + fail_json + Exception encountered when running unmount. + fail_json + Exception occurred when running mount. + fail_json + Persistent data set is either not cataloged or does not exist. + """ # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError # when a parameter fails its validation check @@ -1042,6 +1111,8 @@ def run_module(module, arg_def): def main(): + """Initialize module when executed as main. + """ global module module = AnsibleModule( From 46a21d20f1186c7a696f4121b901b347e985ee79 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:31:02 -0600 Subject: [PATCH 357/413] [Documentation][zos_job_submit] Add and standarize docstrings on modules/zos_job_submit.py (#1354) * Add and standarize docstrings on modules/zos_job_submit.py * Create changelog fragment * Modify google style to numpy * Corrected functino --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1354-update-docstring-zos_job_submit.yml | 3 + plugins/modules/zos_job_submit.py | 99 +++++++++++++++---- 2 files changed, 82 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/1354-update-docstring-zos_job_submit.yml diff --git a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml new file mode 100644 index 000000000..c2c0a4b99 --- /dev/null +++ b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1354). 
\ No newline at end of file diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 204c79217..bb3aac1ab 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -648,26 +648,48 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=None, start_time=timer()): - """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. - - Arguments: - module - module instnace to access the module api - src (str) - JCL, can be relative or absolute paths either on controller or USS - - Data set, can be PS, PDS, PDSE Member - src_name (str) - the src name that was provided in the module because through - the runtime src could be replace with a temporary file name - timeout (int) - how long to wait in seconds for a job to complete - is_unix (bool) - True if JCL is a file in USS, otherwise False; Note that all - JCL local to a controller is transfered to USS thus would be - True - volume (str) - volume the data set JCL is located on that will be cataloged before - being submitted - start_time - time the JCL started its submission - - Returns: - job_submitted_id - the JCL job ID returned from submitting a job, else if no - job submits, None will be returned - duration - how long the job ran for in this method + """Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. + + Parameters + ---------- + module: AnsibleModule + module instance to access the module api. + src : str + JCL, can be relative or absolute paths either on controller or USS + - Data set, can be PS, PDS, PDSE Member. + src_name : str + The src name that was provided in the module because through + the runtime src could be replace with a temporary file name. + timeout : int + How long to wait in seconds for a job to complete. + is_unix : bool + True if JCL is a file in USS, otherwise False; Note that all + JCL local to a controller is transfered to USS thus would be + True. 
+ volume : str + volume the data set JCL is located on that will be cataloged before + being submitted. + start_time : int + time the JCL started its submission. + + Returns + ------- + str + the JCL job ID returned from submitting a job, else if no + job submits, None will be returned. + int + how long the job ran for in this method. + + Raises + ------ + fail_json + Unable to submit job because the data set could not be cataloged on the volume. + fail_json + Unable to submit job, the job submission has failed. + fail_json + The JCL has been submitted but there was an error while fetching its status. + fail_json + The job has been submitted and no job id was returned. """ kwargs = { @@ -801,6 +823,15 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + The value for option 'wait_time_s' is not valid. + """ module_args = dict( src=dict(type="str", required=True), location=dict( @@ -1078,6 +1109,34 @@ def run_module(): def assert_valid_return_code(max_rc, job_rc, ret_code, result): + """Asserts valid return code. + + Parameters + ---------- + max_rc : int + Max return code. + joc_rc : int + Job return code. + ret_code : int + Return code. + result : dict() + Result dictionary. + + Returns + ------- + bool + If job_rc is not 0. + + Raises + ------ + Exception + The job return code was not available in the jobs output. + Exception + The job return code for the submitted job is greater than the value set for option 'max_rc'. + Exception + The step return code for the submitted job is greater than the value set for option 'max_rc'. 
+ """ + if job_rc is None: raise Exception( "The job return code (ret_code[code]) was not available in the jobs output, " From e2a574ffcbfdecb8f5c7df5c2761aff518fe297c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 10:58:24 -0600 Subject: [PATCH 358/413] [Documentation][validation] Add docstrings to module_utils/validation.py (#1336) * Add docstrings to module_utils/validation.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- .../1336-update-docstring-validation.yml | 3 ++ plugins/module_utils/validation.py | 30 +++++++++++++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1336-update-docstring-validation.yml diff --git a/changelogs/fragments/1336-update-docstring-validation.yml b/changelogs/fragments/1336-update-docstring-validation.yml new file mode 100644 index 000000000..547103d46 --- /dev/null +++ b/changelogs/fragments/1336-update-docstring-validation.yml @@ -0,0 +1,3 @@ +trivial: + - validation - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1336). \ No newline at end of file diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py index c08847503..fe41c0a01 100644 --- a/plugins/module_utils/validation.py +++ b/plugins/module_utils/validation.py @@ -22,11 +22,25 @@ def validate_safe_path(path): - """ - This function is implemented to validate against path traversal attack + """This function is implemented to validate against path traversal attack when using os.path.join function. In this action plugin, path is on the controller. + + Parameters + ---------- + path : str + A file's path. + + Returns + ------- + str + The introduced path. + + Raises + ------ + DirectoryTraversalError + User does not have access to a directory. 
""" if not os.path.isabs(path): real_path = os.path.realpath(path) @@ -39,6 +53,18 @@ def validate_safe_path(path): class DirectoryTraversalError(Exception): + """User does not have access to a directory. + + Parameters + ---------- + path : str + Directory path. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ def __init__(self, path): self.msg = "Detected directory traversal, user does not have access to {0}".format(path) super().__init__(self.msg) From b521c3d42b7a06c800ecd3bd348aa52c161cde92 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 10:58:59 -0600 Subject: [PATCH 359/413] [Documentation][job] Add docstrings to module_utils/job.py (#1333) * Add docstrings to module_utils/job.py * Create changelog fragment * Modified the google style to numpy * Update changelog fragment * Standarize numpy style --- .../fragments/1333-update-docstring-job.yml | 3 + plugins/module_utils/job.py | 146 +++++++++++++----- 2 files changed, 114 insertions(+), 35 deletions(-) create mode 100644 changelogs/fragments/1333-update-docstring-job.yml diff --git a/changelogs/fragments/1333-update-docstring-job.yml b/changelogs/fragments/1333-update-docstring-job.yml new file mode 100644 index 000000000..124ef2cae --- /dev/null +++ b/changelogs/fragments/1333-update-docstring-job.yml @@ -0,0 +1,3 @@ +trivial: + - job - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1333). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 25483b45d..72b72a90b 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -62,18 +62,29 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. 
- Keyword Arguments: - job_id (str) -- The job ID to search for (default: {None}) - owner (str) -- The owner of the job (default: {None}) - job_name (str) -- The job name search for (default: {None}) - dd_name (str) -- The data definition to retrieve (default: {None}) - dd_scan (bool) - Whether or not to pull information from the dd's for this job {default: {True}} - duration (int) -- The time the submitted job ran for - timeout (int) - how long to wait in seconds for a job to complete - start_time (int) - time the JCL started its submission - - Returns: - list[dict] -- The output information for a list of jobs matching specified criteria. + Keyword Parameters + ------------------ + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + The data definition to retrieve (default: {None}). + dd_scan : bool + Whether or not to pull information from the dd's for this job {default: {True}}. + duration : int + The time the submitted job ran for. + timeout : int + How long to wait in seconds for a job to complete. + start_time : int + Time the JCL started its submission. + + Returns + ------- + Union[dict] + The output information for a list of jobs matching specified criteria. If no job status is found it will return a ret_code diction with parameter 'msg_txt" = "The job could not be found. """ @@ -127,6 +138,26 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru def _job_not_found(job_id, owner, job_name, dd_name): + """Returns the information of a not founded job. + + Keyword Parameters + ------------------ + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + The data definition to retrieve (default: {None}). 
+ + Returns + ------- + Union[dict] + The empty job information in a list. + If no job status is found it will return a ret_code diction with + parameter 'msg_txt" = "The job could not be found. + """ # Note that the text in the msg_txt is used in test cases and thus sensitive to change jobs = [] if job_id != '*' and job_name != '*': @@ -170,18 +201,25 @@ def _job_not_found(job_id, owner, job_name, dd_name): def job_status(job_id=None, owner=None, job_name=None, dd_name=None): """Get the status information of a z/OS job based on various search criteria. - Keyword Arguments: - job_id {str} -- The job ID to search for (default: {None}) - owner {str} -- The owner of the job (default: {None}) - job_name {str} -- The job name search for (default: {None}) - dd_name {str} -- If populated, return ONLY this DD in the job list (default: {None}) - note: no routines call job_status with dd_name, so we are speeding this routine with - 'dd_scan=False' - - Returns: - list[dict] -- The status information for a list of jobs matching search criteria. + Keyword Parameters + ------------------ + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + If populated, return ONLY this DD in the job list (default: {None}) + note: no routines call job_status with dd_name, so we are speeding this routine with + 'dd_scan=False'. + + Returns + ------- + Union[dict] + The status information for a list of jobs matching search criteria. If no job status is found it will return a ret_code diction with - parameter 'msg_txt" = "The job could not be found." + parameter 'msg_txt" = "The job could not be found.". 
""" arg_defs = dict( @@ -222,13 +260,17 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): def _parse_steps(job_str): - """Parse the dd section of output to retrieve step-wise CC's + """Parse the dd section of output to retrieve step-wise CC's. - Args: - job_str (str): The content for a given dd. + Parameters + ---------- + job_str : str + The content for a given dd. - Returns: - list[dict]: A list of step names listed as "step executed" the related CC. + Returns + ------- + Union[dict] + A list of step names listed as "step executed" the related CC. """ stp = [] if "STEP WAS EXECUTED" in job_str: @@ -245,6 +287,34 @@ def _parse_steps(job_str): def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): + """Get job status. + + Parameters + ---------- + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + The data definition to retrieve (default: {None}). + dd_scan : bool + Whether or not to pull information from the dd's for this job {default: {True}}. + duration : int + The time the submitted job ran for. + timeout : int + How long to wait in seconds for a job to complete. + start_time : int + Time the JCL started its submission. + + Returns + ------- + Union[dict] + The output information for a list of jobs matching specified criteria. + If no job status is found it will return a ret_code diction with + parameter 'msg_txt" = "The job could not be found. + """ if job_id == "*": job_id_temp = None else: @@ -431,19 +501,25 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T def _ddname_pattern(contents, resolve_dependencies): - """Resolver for ddname_pattern type arguments + """Resolver for ddname_pattern type arguments. - Arguments: - contents {bool} -- The contents of the argument. 
+ Parameters + ---------- + contents : bool + The contents of the argument. resolved_dependencies {dict} -- Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type + Returns + ------- + str + The arguments contents after any necessary operations. - Returns: - str -- The arguments contents after any necessary operations. + Raises + ------ + ValueError + When contents is invalid argument type. """ if not re.fullmatch( r"^(?:[A-Z]{1}[A-Z0-9]{0,7})|(?:\?{1})$", From fe42127fc59c2f23d1a9f4f32193e30cffdeb8eb Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 10:59:24 -0600 Subject: [PATCH 360/413] [Documentation][ickdsf] Add docstrings to module_utils/ickdsf.py (#1331) * Add docstrings to module_utils/ickdsf.py * Create changelog fragment * Modified the google style to numpy * Update changelog fragment * Standarize numpy style --- .../1331-update-docstring-ickdsf.yml | 3 ++ plugins/module_utils/ickdsf.py | 32 +++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 changelogs/fragments/1331-update-docstring-ickdsf.yml diff --git a/changelogs/fragments/1331-update-docstring-ickdsf.yml b/changelogs/fragments/1331-update-docstring-ickdsf.yml new file mode 100644 index 000000000..545ba95c1 --- /dev/null +++ b/changelogs/fragments/1331-update-docstring-ickdsf.yml @@ -0,0 +1,3 @@ +trivial: + - ickdsf - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1331). \ No newline at end of file diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py index 67ddd3d9d..436750c21 100644 --- a/plugins/module_utils/ickdsf.py +++ b/plugins/module_utils/ickdsf.py @@ -26,6 +26,22 @@ def get_init_command(module, result, args): + """Get init command. 
+ + Parameters + ---------- + module : obj + Object from the collection. + result : dic + Results dictionary. + args : dict + Arguments to be formatted. + + Returns + ------- + str + Formatted JCL strings for zos_mvs_raw. + """ # Get parameters from playbooks address = args.get('address') @@ -104,6 +120,22 @@ def get_init_command(module, result, args): def init(module, result, parsed_args): + """Init + + Parameters + ---------- + module : object + The module to give results of. + result : dict + The results of the process. + parsed_args : dict + Parsed arguments to be converted to command. + + Returns + ------- + dict + The dictionary with the results. + """ # Convert args parsed from module to ickdsf INIT command cmd = get_init_command(module, result, parsed_args) From 8da1199bdd717a19cd374f625e8a0c9e8cc3ee41 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 13:28:40 -0600 Subject: [PATCH 361/413] [Documentation][import handler] Add docstrings to module_utils/import_handler.py (#1332) * Add docstrings to module_utils/ickdsf.py * Add docstrings to module_utils/import_handler.py * Delete modifications to ickdsf.py * Create changelog fragment * Revert changes in ickdsf.py * Modified the google style to numpy * Update changelog fragment * Standarize numpy style * Update import_handler.py --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1332-update-docstring-import_handler.yml | 3 + plugins/module_utils/import_handler.py | 92 ++++++++++++++++--- 2 files changed, 81 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1332-update-docstring-import_handler.yml diff --git a/changelogs/fragments/1332-update-docstring-import_handler.yml b/changelogs/fragments/1332-update-docstring-import_handler.yml new file mode 100644 index 000000000..5b32cd32e --- /dev/null +++ b/changelogs/fragments/1332-update-docstring-import_handler.yml @@ -0,0 +1,3 @@ +trivial: + - import_handler - Updated 
docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1332). \ No newline at end of file diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py index a7b41a619..507dd2f65 100644 --- a/plugins/module_utils/import_handler.py +++ b/plugins/module_utils/import_handler.py @@ -15,8 +15,24 @@ class MissingZOAUImport(object): + """Error when importing ZOAU. + """ def __getattr__(self, name): def method(*args, **kwargs): + """Raises ImportError as a result of a failed ZOAU import. + + Parameters + ---------- + *args : dict + Arguments ordered in a dictionary. + **kwargs : dict + Arguments ordered in a dictionary. + + Raises + ------ + ImportError + Unable to import a module or library. + """ raise ImportError( ( "ZOAU is not properly configured for Ansible. Unable to import zoautil_py. " @@ -28,26 +44,34 @@ def method(*args, **kwargs): class ZOAUImportError(object): - """This class serves as a wrapper for any kind of error when importing - ZOAU. Since ZOAU is used by both modules and module_utils, we need a way - to alert the user when they're trying to use a function that couldn't be - imported properly. If we only had to deal with this in modules, we could - just validate that imports worked at the start of their main functions, - but on utils, we don't have an entry point where we can validate this. - Just raising an exception when trying the import would be better, but that - introduces a failure on Ansible sanity tests, so we can't do it. - - Instead, we'll replace what would've been a ZOAU library with this class, - and the moment ANY method gets called, we finally raise an exception. - """ - def __init__(self, exception_traceback): - """When creating a new instance of this class, we save the traceback + """This class serves as a wrapper for any kind of error when importing + ZOAU. 
Since ZOAU is used by both modules and module_utils, we need a way + to alert the user when they're trying to use a function that couldn't be + imported properly. If we only had to deal with this in modules, we could + just validate that imports worked at the start of their main functions, + but on utils, we don't have an entry point where we can validate this. + Just raising an exception when trying the import would be better, but that + introduces a failure on Ansible sanity tests, so we can't do it. + + Instead, we'll replace what would've been a ZOAU library with this class, + and the moment ANY method gets called, we finally raise an exception. + When creating a new instance of this class, we save the traceback from the original exception so that users have more context when their task/code fails. The expected traceback is a string representation of it, not an actual traceback object. By importing `traceback` from the standard library and calling `traceback.format_exc()` we can get this string. + + Parameters + ---------- + exception_traceback : str + The formatted traceback of the exception. + + Attributes + ---------- + exception_traceback : str + The formatted traceback of the exception. """ self.traceback = exception_traceback @@ -58,6 +82,20 @@ def __getattr__(self, name): an error while importing ZOAU. """ def method(*args, **kwargs): + """Raises ImportError as a result of a failed ZOAU import. + + Parameters + ---------- + *args : dict + Arguments ordered in a dictionary. + **kwargs : dict + Arguments ordered in a dictionary. + + Raises + ------ + ImportError + Unable to import a module or library. + """ raise ImportError( ( "ZOAU is not properly configured for Ansible. Unable to import zoautil_py. " @@ -71,10 +109,36 @@ def method(*args, **kwargs): class MissingImport(object): def __init__(self, import_name=""): + """Error when it is unable to import a module due to it being missing. 
+ + Parameters + ---------- + import_name : str + The name of the module to import. + + Attributes + ---------- + import_name : str + The name of the module to import. + """ self.import_name = import_name def __getattr__(self, name): def method(*args, **kwargs): + """Raises ImportError as a result of trying to import a missing module. + + Parameter + --------- + *args : dict + Arguments ordered in a dictionary. + **kwargs : dict + Arguments ordered in a dictionary. + + Raises + ------ + ImportError + Unable to import a module or library. + """ raise ImportError("Import {0} was not available.".format(self.import_name)) return method From 92653c5c50c77c87fa7a7afe4a13a5a19b53878e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 22 Apr 2024 11:53:43 -0600 Subject: [PATCH 362/413] [Bugfix][1301]Work_around_fix_false_positive (#1340) * First iteration work around * Get the fix stable and return test case * Add clean response * Fix sanity * Add stderr * Fix case sensitive * Comment fail test case * Fix upper case * Add fragment * Retur test case * Change fragment * Add coment and all cases * Add absent double quotes and special cases * Fix ansible sanity --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1340-Work_around_fix_false_positive.yml | 4 + plugins/modules/zos_blockinfile.py | 124 +++++++++++++++--- .../modules/test_zos_blockinfile_func.py | 38 +++--- 3 files changed, 129 insertions(+), 37 deletions(-) create mode 100644 changelogs/fragments/1340-Work_around_fix_false_positive.yml diff --git a/changelogs/fragments/1340-Work_around_fix_false_positive.yml b/changelogs/fragments/1340-Work_around_fix_false_positive.yml new file mode 100644 index 000000000..8e8360808 --- /dev/null +++ b/changelogs/fragments/1340-Work_around_fix_false_positive.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_blockinfile - Using double quotation marks inside a 
block resulted in a false + positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/1340). \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 8fd9701da..88f410cdb 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -414,6 +414,73 @@ def quotedString(string): return string.replace('"', "") +def quotedString_double_quotes(string): + # add escape if string was quoted + if not isinstance(string, str): + return string + return string.replace('"', '\\"') + + +def check_double_quotes(marker, ins_bef, ins_aft, block): + if marker: + if '"' in marker: + return True + if ins_bef: + if '"' in ins_bef: + return True + if ins_aft: + if '"' in ins_aft: + return True + if block: + if '"' in block: + return True + return False + + +def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=None, ins_aft=None): + block = block.replace('"', '\\"') + force = "-f" if force else "" + encoding = "-c {0}".format(encoding) if encoding else "" + marker = "-m \"{0}\"".format(marker) if marker else "" + if state: + if ins_aft: + if ins_aft == "EOF": + opts = f'"$ a\\{block}" "{src}"' + else: + opts = f'-s -e "/{ins_aft}/a\\{block}/$" -e "$ a\\{block}" "{src}"' + elif ins_bef: + if ins_bef == "BOF": + opts = f' "1 i\\{block}" "{src}" ' + else: + opts = f'-s -e "/{ins_bef}/i\\{block}/$" -e "$ a\\{block}" "{src}"' + + cmd = "dmod -b {0} {1} {2} {3}".format(force, encoding, marker, opts) + else: + cmd = """dmod -b {0} {1} {2} "//d" {4}""".format(force, encoding, marker, src) + + rc, stdout, stderr = module.run_command(cmd) + cmd = clean_command(cmd) + return rc, cmd + + +def clean_command(cmd): + cmd = cmd.replace('/c\\\\', '') + cmd = cmd.replace('/a\\\\', '', ) + cmd = cmd.replace('/i\\\\', '', ) + cmd = cmd.replace('$ a\\\\', '', ) + cmd = cmd.replace('1 i\\\\', '', ) + cmd = 
cmd.replace('/c\\', '') + cmd = cmd.replace('/a\\', '') + cmd = cmd.replace('/i\\', '') + cmd = cmd.replace('$ a\\', '') + cmd = cmd.replace('1 i\\', '') + cmd = cmd.replace('/d', '') + cmd = cmd.replace('\\\\d', '') + cmd = cmd.replace('\\n', '\n') + cmd = cmd.replace('\\"', '"') + return cmd + + def main(): module = AnsibleModule( argument_spec=dict( @@ -553,6 +620,7 @@ def main(): module.fail_json(msg=message) file_type = 0 + return_content = None if backup: # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided. # setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use @@ -566,29 +634,47 @@ def main(): result['backup_name'] = Backup.mvs_file_backup(dsn=src, bk_dsn=backup, tmphlq=tmphlq) except Exception as err: module.fail_json(msg="Unable to allocate backup {0} destination: {1}".format(backup, str(err))) + double_quotes_exists = check_double_quotes(marker, ins_bef, ins_aft, block) # state=present, insert/replace a block with matching regex pattern # state=absent, delete blocks with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force) + if double_quotes_exists: + rc, cmd = execute_dmod(src, block, quotedString_double_quotes(marker), force, encoding, True, module=module, + ins_bef=quotedString_double_quotes(ins_bef), ins_aft=quotedString_double_quotes(ins_aft)) + result['rc'] = rc + result['cmd'] = cmd + result['changed'] = True if rc == 0 else False + stderr = 'Failed to insert new entry' if rc != 0 else "" + else: + return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force) else: - return_content = absent(src, marker, encoding, force) - stdout = return_content.stdout_response - stderr = return_content.stderr_response - rc = return_content.rc - stdout = stdout.replace('/d', '\\\\d') - try: - # Try to extract information from stdout - # The triple double quotes is 
required for special characters (/_) been scape - ret = json.loads("""{0}""".format(stdout)) - except Exception: - messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) - if result.get('backup_name'): - messageDict['backup_name'] = result['backup_name'] - module.fail_json(**messageDict) - - result['cmd'] = ret['data']['commands'] - result['changed'] = ret['data']['changed'] - result['found'] = ret['data']['found'] + if double_quotes_exists: + rc, cmd = execute_dmod(src, block, quotedString_double_quotes(marker), force, encoding, False, module=module) + result['rc'] = rc + result['cmd'] = cmd + result['changed'] = True if rc == 0 else False + stderr = 'Failed to remove entry' if rc != 0 else "" + else: + return_content = absent(src, marker, encoding, force) + # ZOAU 1.3.0 generate false positive working with double quotes (") the call generate distinct return when using and not + if not double_quotes_exists: + stdout = return_content.stdout_response + stderr = return_content.stderr_response + rc = return_content.rc + stdout = stdout.replace('/d', '\\\\d') + try: + # Try to extract information from stdout + # The triple double quotes is required for special characters (/_) been scape + ret = json.loads("""{0}""".format(stdout)) + except Exception: + messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) + if result.get('backup_name'): + messageDict['backup_name'] = result['backup_name'] + module.fail_json(**messageDict) + + result['cmd'] = ret['data']['commands'] + result['changed'] = ret['data']['changed'] + result['found'] = ret['data']['found'] # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case # That information will be given with 'changed' and 'found' if len(stderr): diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 
508a2ce8d..635da733e 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -864,24 +864,25 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): remove_uss_environment(ansible_zos_module) # Test case base on bug of dataset.blockifile -# GH Issue #1258 -#@pytest.mark.uss -#def test_uss_block_insert_with_doublequotes(ansible_zos_module): -# hosts = ansible_zos_module -# params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") -# full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] -# content = TEST_CONTENT_DOUBLEQUOTES -# try: -# set_uss_environment(ansible_zos_module, content, full_path) -# params["path"] = full_path -# results = hosts.all.zos_blockinfile(**params) -# for result in results.contacted.values(): -# assert result.get("changed") == 1 -# results = hosts.all.shell(cmd="cat {0}".format(params["path"])) -# for result in results.contacted.values(): -# assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES -# finally: -# remove_uss_environment(ansible_zos_module) +# GH Issue #1258 +@pytest.mark.uss +def test_uss_block_insert_with_doublequotes(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DOUBLEQUOTES + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + print(result) + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == 
EXPECTED_DOUBLE_QUOTES + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss @@ -1412,6 +1413,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): hosts.all.zos_data_set(name=ds_name, state="absent") +# Enhancemed #1339 @pytest.mark.ds @pytest.mark.parametrize("dstype", ["pds","pdse"]) def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): From 9503a19090b1f3ea85148b311f37db5d164fadd6 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 22 Apr 2024 16:33:22 -0400 Subject: [PATCH 363/413] Remove stack fault on longer running jobs (#1383) * commit with partial changelog. fix in job.py and correction to test module. * updated pr link in changelog fragment * Update job.py --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/971-bug-job_submit-can-stacktrace.yml | 6 ++++++ plugins/module_utils/job.py | 9 +++------ tests/functional/modules/test_zos_job_submit_func.py | 1 - 3 files changed, 9 insertions(+), 7 deletions(-) create mode 100644 changelogs/fragments/971-bug-job_submit-can-stacktrace.yml diff --git a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml new file mode 100644 index 000000000..e02daed4c --- /dev/null +++ b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml @@ -0,0 +1,6 @@ +trivial: + - job.py - generalized resolution of query_exception that may be thrown. + This should prevent the stack trace. + (https://github.com/ansible-collections/ibm_zos_core/pull/1383). + - test_zos_job_submit.py - Removed code that was hiding if a duration was not returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/1383). 
diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 72b72a90b..c25789030 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -386,9 +386,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T try: list_of_dds = jobs.list_dds(entry.job_id) except exceptions.DDQueryException as err: - if 'BGYSC5201E' in str(err): - is_dd_query_exception = True - pass + is_dd_query_exception = True # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s. # Idea here is to force a TYPRUN{HOLD|JCLHOLD|COPY} job to go the full wait duration since we have @@ -409,9 +407,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False except exceptions.DDQueryException as err: - if 'BGYSC5201E' in str(err): - is_dd_query_exception = True - continue + is_dd_query_exception = True + continue job["duration"] = duration for single_dd in list_of_dds: diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 34fb39d4b..1e231f60d 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -695,7 +695,6 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - duration = result.get('duration') if duration >= args["wait_time_s"]: From 5599d9ee3f1aa75130a2fd5e57dd1c883480ae46 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 6 May 2024 17:03:27 -0700 Subject: [PATCH 364/413] Update collection meta to reflect zoau 1.3 
or later Signed-off-by: ddimatos <dimatos@gmail.com> --- meta/ibm_zos_core_meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 7e24bc280..9b4dfde5e 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.3.0" + - ">=1.3.0" From 147d46fde7a6d8b67d6f51769ae4eae93c687b57 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 6 May 2024 17:21:19 -0700 Subject: [PATCH 365/413] Update module copyrights according to preferred format Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/action/zos_copy.py | 2 +- plugins/action/zos_fetch.py | 2 +- plugins/action/zos_job_submit.py | 2 +- plugins/action/zos_ping.py | 2 +- plugins/modules/zos_archive.py | 2 +- plugins/modules/zos_copy.py | 2 +- plugins/modules/zos_encode.py | 2 +- plugins/modules/zos_fetch.py | 2 +- plugins/modules/zos_gather_facts.py | 2 +- plugins/modules/zos_job_output.py | 2 +- plugins/modules/zos_job_query.py | 2 +- plugins/modules/zos_mount.py | 2 +- plugins/modules/zos_mvs_raw.py | 2 +- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- plugins/modules/zos_ping.rexx | 2 +- plugins/modules/zos_unarchive.py | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index e3ea36dc8..7d9b4d3fd 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2023 +# Copyright (c) IBM Corporation 2019, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 611922bf3..56232f34f 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 8e06c340b..23c31cb95 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/action/zos_ping.py b/plugins/action/zos_ping.py index 9d644d389..b3b2b328a 100644 --- a/plugins/action/zos_ping.py +++ b/plugins/action/zos_ping.py @@ -1,6 +1,6 @@ # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # Copyright (c) 2017 Ansible Project -# Copyright IBM Corporation 2020, 2021, 2022 +# Copyright IBM Corporation 2020, 2022 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index cbe96b65d..8a3961c8b 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 - 2024 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index da29f688a..489c71593 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 243abb2d9..b92fdc72b 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index fda237768..8b4d4809d 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index a9df42a49..eb7699cdb 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 - 2024 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 6a6328e67..f458c2981 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 279a3955f..fee64bdb1 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 8828d9005..f16ddfe29 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index bcac50a63..bbb187ef9 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 012a46c0c..f37471397 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ba6e4ee77..ed426e9b5 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index beca54c3b..78e09f6b5 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -1,7 +1,7 @@ /* rexx __ANSIBLE_ENCODE_EBCDIC__ */ /* WANT_JSON */ -/* Copyright (c) IBM Corporation 2019, 2020, 2023 */ +/* Copyright (c) IBM Corporation 2019, 2023 */ /* Licensed under the Apache License, Version 2.0 (the "License"); */ /* you may not use this file except in compliance with the License. */ diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 31d709a3a..f824459c4 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 - 2024 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at From 8e6c7b051898ca87b63fbcc53eab6e659c0178ad Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 6 May 2024 17:49:38 -0700 Subject: [PATCH 366/413] Fix mdouels doc so asterisk are properly shown Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_apf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 664b2e493..ecf7df74c 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -59,7 +59,7 @@ - The identifier for the volume containing the library specified in the C(library) parameter. The values must be one the following. - 1. The volume serial number. - - 2. Six asterisks (******), indicating that the system must use the + - 2. Six asterisks C(******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. @@ -176,7 +176,7 @@ specified on the C(library) parameter. The values must be one of the following. - 1. The volume serial number - - 2. Six asterisks (******), indicating that the system must use the + - 2. Six asterisks C(******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. 
*MCAT*, indicating that the system must use the volume serial From bd9d7d67d7cac6f246e226044532b90650c5fe7a Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 6 May 2024 17:50:42 -0700 Subject: [PATCH 367/413] Update module docs Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_apf.rst | 68 ++-- docs/source/modules/zos_apf.rst-e | 68 ++-- docs/source/modules/zos_archive.rst | 88 ++--- docs/source/modules/zos_backup_restore.rst | 70 ++-- docs/source/modules/zos_blockinfile.rst | 52 +-- docs/source/modules/zos_copy.rst | 180 ++++----- docs/source/modules/zos_data_set.rst | 154 ++++---- docs/source/modules/zos_encode.rst | 32 +- docs/source/modules/zos_fetch.rst | 18 +- docs/source/modules/zos_find.rst | 20 +- docs/source/modules/zos_gather_facts.rst | 14 +- docs/source/modules/zos_job_output.rst | 16 +- docs/source/modules/zos_job_query.rst | 20 +- docs/source/modules/zos_job_submit.rst | 58 +-- docs/source/modules/zos_lineinfile.rst | 68 ++-- docs/source/modules/zos_mount.rst | 86 ++--- docs/source/modules/zos_mvs_raw.rst | 354 +++++++++--------- docs/source/modules/zos_operator.rst | 2 +- .../modules/zos_operator_action_query.rst | 20 +- docs/source/modules/zos_ping.rst | 8 +- docs/source/modules/zos_script.rst | 32 +- docs/source/modules/zos_tso_command.rst | 4 +- docs/source/modules/zos_unarchive.rst | 58 +-- docs/source/modules/zos_volume_init.rst | 34 +- docs/source/plugins.rst | 3 +- 25 files changed, 764 insertions(+), 763 deletions(-) diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 73d616e76..c7e0db588 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -37,7 +37,7 @@ library state - Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . + Ensure that the library is added ``state=present`` or removed ``state=absent``. The APF list format has to be "DYNAMIC". 
@@ -58,24 +58,24 @@ force_dynamic volume - The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. + The identifier for the volume containing the library specified in the ``library`` parameter. The values must be one the following. 1. The volume serial number. - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + If ``volume`` is not specified, ``library`` has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. + If ``sms=True``, ``volume`` value will be ignored. 
| **required**: False | **type**: bool @@ -83,13 +83,13 @@ sms operation - Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ + Change APF list format to "DYNAMIC" ``operation=set_dynamic`` or "STATIC" ``operation=set_static`` - Display APF list current format \ :literal:`operation=check\_format`\ + Display APF list current format ``operation=check_format`` - Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. + Display APF list entries when ``operation=list`` ``library``, ``volume`` and ``sms`` will be used as filters. - If \ :literal:`operation`\ is not set, add or remove operation will be ignored. + If ``operation`` is not set, add or remove operation will be ignored. | **required**: False | **type**: str @@ -99,23 +99,23 @@ operation tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str persistent - Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ + Add/remove persistent entries to or from *data_set_name* - \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ + ``library`` will not be persisted or removed if ``persistent=None`` | **required**: False | **type**: dict data_set_name - The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. + The data set name used for persisting or removing a ``library`` from the APF list. | **required**: True | **type**: str @@ -124,13 +124,13 @@ persistent marker The marker line template. 
- \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". + ``{mark}`` will be replaced with "BEGIN" and "END". - Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. - \ :literal:`{mark}`\ length may not exceed 72 characters. + ``{mark}`` length may not exceed 72 characters. - The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format | **required**: False | **type**: str @@ -138,9 +138,9 @@ persistent backup - Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". + Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -152,11 +152,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. 
- If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -168,9 +168,9 @@ persistent batch A list of dictionaries for adding or removing libraries. - This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ + This is mutually exclusive with ``library``, ``volume``, ``sms`` - Can be used with \ :literal:`persistent`\ + Can be used with ``persistent`` | **required**: False | **type**: list @@ -185,24 +185,24 @@ batch volume - The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. + The identifier for the volume containing the library specified on the ``library`` parameter. The values must be one of the following. 1. The volume serial number - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. 
\*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + If ``volume`` is not specified, ``library`` has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If true \ :literal:`volume`\ will be ignored. + If true ``volume`` will be ignored. | **required**: False | **type**: bool @@ -283,9 +283,9 @@ Return Values stdout The stdout from ZOAU command apfadm. Output varies based on the type of operation. - state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + state> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm - operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC + operation> stdout of operation options list> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] 
set_dynamic> Set to DYNAMIC set_static> Set to STATIC check_format> DYNAMIC or STATIC | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_apf.rst-e b/docs/source/modules/zos_apf.rst-e index ec8e6824c..60c036dcf 100644 --- a/docs/source/modules/zos_apf.rst-e +++ b/docs/source/modules/zos_apf.rst-e @@ -37,7 +37,7 @@ library state - Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . + Ensure that the library is added ``state=present`` or removed ``state=absent``. The APF list format has to be "DYNAMIC". @@ -58,24 +58,24 @@ force_dynamic volume - The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. + The identifier for the volume containing the library specified in the ``library`` parameter. The values must be one the following. 1. The volume serial number. - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + If ``volume`` is not specified, ``library`` has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. 
+ Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. + If ``sms=True``, ``volume`` value will be ignored. | **required**: False | **type**: bool @@ -83,13 +83,13 @@ sms operation - Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ + Change APF list format to "DYNAMIC" ``operation=set_dynamic`` or "STATIC" ``operation=set_static`` - Display APF list current format \ :literal:`operation=check\_format`\ + Display APF list current format ``operation=check_format`` - Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. + Display APF list entries when ``operation=list`` ``library``, ``volume`` and ``sms`` will be used as filters. - If \ :literal:`operation`\ is not set, add or remove operation will be ignored. + If ``operation`` is not set, add or remove operation will be ignored. | **required**: False | **type**: str @@ -99,23 +99,23 @@ operation tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. 
| **required**: False | **type**: str persistent - Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ + Add/remove persistent entries to or from *data_set_name* - \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ + ``library`` will not be persisted or removed if ``persistent=None`` | **required**: False | **type**: dict data_set_name - The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. + The data set name used for persisting or removing a ``library`` from the APF list. | **required**: True | **type**: str @@ -124,13 +124,13 @@ persistent marker The marker line template. - \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". + ``{mark}`` will be replaced with "BEGIN" and "END". - Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. - \ :literal:`{mark}`\ length may not exceed 72 characters. + ``{mark}`` length may not exceed 72 characters. - The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format | **required**: False | **type**: str @@ -138,9 +138,9 @@ persistent backup - Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". + Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . 
+ *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -152,11 +152,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -168,9 +168,9 @@ persistent batch A list of dictionaries for adding or removing libraries. 
- This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ + This is mutually exclusive with ``library``, ``volume``, ``sms`` - Can be used with \ :literal:`persistent`\ + Can be used with ``persistent`` | **required**: False | **type**: list @@ -185,24 +185,24 @@ batch volume - The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. + The identifier for the volume containing the library specified on the ``library`` parameter. The values must be one of the following. 1. The volume serial number - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + If ``volume`` is not specified, ``library`` has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If true \ :literal:`volume`\ will be ignored. + If true ``volume`` will be ignored. | **required**: False | **type**: bool @@ -283,9 +283,9 @@ Return Values stdout The stdout from ZOAU command apfadm. Output varies based on the type of operation. 
- state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + state> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm - operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC + operation> stdout of operation options list> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set_dynamic> Set to DYNAMIC set_static> Set to STATIC check_format> DYNAMIC or STATIC | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 3249f3ba8..f07aa931e 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -20,7 +20,7 @@ Synopsis - Sources for archiving must be on the remote z/OS system. - Supported sources are USS (UNIX System Services) or z/OS data sets. - The archive remains on the remote z/OS system. -- For supported archive formats, see option \ :literal:`format`\ . +- For supported archive formats, see option ``format``. @@ -35,7 +35,7 @@ src USS file paths should be absolute paths. - MVS data sets supported types are: \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . + MVS data sets supported types are: ``SEQ``, ``PDS``, ``PDSE``. VSAMs are not supported. @@ -68,7 +68,7 @@ format terse_pack - Compression option for use with the terse format, \ :emphasis:`name=terse`\ . + Compression option for use with the terse format, *name=terse*. Pack will compress records in a data set so that the output results in lossless data compression. 
@@ -88,14 +88,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. + When providing the *xmit_log_data_set* name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using \ :literal:`xmit`\ or \ :literal:`terse`\ . + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. | **required**: False | **type**: bool @@ -107,19 +107,19 @@ format dest The remote absolute path or data set where the archive should be created. - \ :emphasis:`dest`\ can be a USS file or MVS data set name. + *dest* can be a USS file or MVS data set name. - If \ :emphasis:`dest`\ has missing parent directories, they will be created. + If *dest* has missing parent directories, they will be created. - If \ :emphasis:`dest`\ is a nonexistent USS file, it will be created. + If *dest* is a nonexistent USS file, it will be created. - If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=true`\ , the existing \ :emphasis:`dest`\ will be deleted and recreated with attributes defined in the \ :emphasis:`dest\_data\_set`\ option or computed by the module. + If *dest* is an existing file or data set and *force=true*, the existing *dest* will be deleted and recreated with attributes defined in the *dest_data_set* option or computed by the module. - If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=false`\ or not specified, the module exits with a note to the user. 
+ If *dest* is an existing file or data set and *force=false* or not specified, the module exits with a note to the user. - Destination data set attributes can be set using \ :emphasis:`dest\_data\_set`\ . + Destination data set attributes can be set using *dest_data_set*. - Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the \ :emphasis:`dest\_data\_set`\ option will improve performance. + Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the *dest_data_set* option will improve performance. | **required**: True | **type**: str @@ -128,9 +128,9 @@ dest exclude Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from src list and glob expansion. - Patterns (wildcards) can contain one of the following, \`?\`, \`\*\`. + Patterns (wildcards) can contain one of the following, `?`, `*`. - \* matches everything. + * matches everything. ? matches any single character. @@ -144,7 +144,7 @@ group When left unspecified, it uses the current group of the current use unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str @@ -153,13 +153,13 @@ group mode The permission of the destination archive file. - If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. 
The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results.
+ It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``) or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results.

 The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'.

- \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the src file.
+ *mode=preserve* means that the file will be given the same permissions as the src file.

 | **required**: False
 | **type**: str
@@ -170,14 +170,14 @@ owner
 When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership.

-  This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored.
+  This option is only applicable if ``dest`` is USS, otherwise ignored.

 | **required**: False
 | **type**: str


remove
- Remove any added source files , trees or data sets after module \ `zos\_archive <./zos_archive.html>`__\ adds them to the archive. Source files, trees and data sets are identified with option \ :emphasis:`src`\ .
+ Remove any added source files, trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *src*.
| **required**: False | **type**: bool @@ -185,7 +185,7 @@ remove dest_data_set - Data set attributes to customize a \ :literal:`dest`\ data set to be archived into. + Data set attributes to customize a ``dest`` data set to be archived into. | **required**: False | **type**: dict @@ -208,18 +208,18 @@ dest_data_set space_primary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int space_secondary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -228,7 +228,7 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -236,7 +236,7 @@ dest_data_set record_format - If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`FB`\ ) + If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) Choices are case-sensitive. @@ -313,18 +313,18 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. 
- The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used.
+ The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used.

 | **required**: False
 | **type**: str


force
- If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ will be deleted. Otherwise it will be created with the \ :literal:`dest\_data\_set`\ attributes or default values if \ :literal:`dest\_data\_set`\ is not specified.
+ If set to ``true``, the remote file or data set ``dest`` will be deleted. Otherwise it will be created with the ``dest_data_set`` attributes or default values if ``dest_data_set`` is not specified.

- If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist.
+ If set to ``false``, the file or data set will only be copied if the destination does not exist.

- If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user.
+ If set to ``false`` and destination exists, the module exits with a note to the user.

 | **required**: False
 | **type**: bool
@@ -392,11 +392,11 @@ Notes
-----

.. note::
- This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos\_fetch to retrieve to the controller and then zos\_copy or zos\_unarchive for copying to a remote or send to the remote and then unpack the archive respectively.
+ This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively.
- When packing and using \ :literal:`use\_adrdssu`\ flag the module will take up to two times the space indicated in \ :literal:`dest\_data\_set`\ . + When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. - tar, zip, bz2 and pax are archived using python \ :literal:`tarfile`\ library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. + tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. @@ -416,27 +416,27 @@ Return Values state - The state of the input \ :literal:`src`\ . + The state of the input ``src``. - \ :literal:`absent`\ when the source files or data sets were removed. + ``absent`` when the source files or data sets were removed. - \ :literal:`present`\ when the source files or data sets were not removed. + ``present`` when the source files or data sets were not removed. - \ :literal:`incomplete`\ when \ :literal:`remove`\ was true and the source files or data sets were not removed. + ``incomplete`` when ``remove`` was true and the source files or data sets were not removed. | **returned**: always | **type**: str dest_state - The state of the \ :emphasis:`dest`\ file or data set. + The state of the *dest* file or data set. - \ :literal:`absent`\ when the file does not exist. + ``absent`` when the file does not exist. - \ :literal:`archive`\ when the file is an archive. + ``archive`` when the file is an archive. - \ :literal:`compress`\ when the file is compressed, but not an archive. + ``compress`` when the file is compressed, but not an archive. - \ :literal:`incomplete`\ when the file is an archive, but some files under \ :emphasis:`src`\ were not found. 
+ ``incomplete`` when the file is an archive, but some files under *src* were not found. | **returned**: success | **type**: str @@ -454,7 +454,7 @@ archived | **type**: list arcroot - If \ :literal:`src`\ is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. + If ``src`` is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 6833279fa..66d0d0f4b 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -47,34 +47,34 @@ data_sets include - When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to include in the backup. + When *operation=backup*, specifies a list of data sets or data set patterns to include in the backup. - When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to include when restoring from a backup. + When *operation=restore*, specifies a list of data sets or data set patterns to include when restoring from a backup. - The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. + The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. - When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. 
Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. + A question mark ``?`` or percent sign ``%`` matches a single character. | **required**: False | **type**: raw exclude - When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to exclude from the backup. + When *operation=backup*, specifies a list of data sets or data set patterns to exclude from the backup. - When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to exclude when restoring from a backup. + When *operation=restore*, specifies a list of data sets or data set patterns to exclude when restoring from a backup. - The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified." + The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified." - When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. + A question mark ``?`` or percent sign ``%`` matches a single character. 
| **required**: False | **type**: raw @@ -84,22 +84,22 @@ data_sets volume This applies to both data set restores and volume restores. - When \ :emphasis:`operation=backup`\ and \ :emphasis:`data\_sets`\ are provided, specifies the volume that contains the data sets to backup. + When *operation=backup* and *data_sets* are provided, specifies the volume that contains the data sets to backup. - When \ :emphasis:`operation=restore`\ , specifies the volume the backup should be restored to. + When *operation=restore*, specifies the volume the backup should be restored to. - \ :emphasis:`volume`\ is required when restoring a full volume backup. + *volume* is required when restoring a full volume backup. | **required**: False | **type**: str full_volume - When \ :emphasis:`operation=backup`\ and \ :emphasis:`full\_volume=True`\ , specifies that the entire volume provided to \ :emphasis:`volume`\ should be backed up. + When *operation=backup* and *full_volume=True*, specifies that the entire volume provided to *volume* should be backed up. - When \ :emphasis:`operation=restore`\ and \ :emphasis:`full\_volume=True`\ , specifies that the volume should be restored (default is dataset). + When *operation=restore* and *full_volume=True*, specifies that the volume should be restored (default is dataset). - \ :emphasis:`volume`\ must be provided when \ :emphasis:`full\_volume=True`\ . + *volume* must be provided when *full_volume=True*. | **required**: False | **type**: bool @@ -109,18 +109,18 @@ full_volume temp_volume Specifies a particular volume on which the temporary data sets should be created during the backup and restore process. - When \ :emphasis:`operation=backup`\ and \ :emphasis:`backup\_name`\ is a data set, specifies the volume the backup should be placed in. + When *operation=backup* and *backup_name* is a data set, specifies the volume the backup should be placed in. 
| **required**: False | **type**: str backup_name - When \ :emphasis:`operation=backup`\ , the destination data set or UNIX file to hold the backup. + When *operation=backup*, the destination data set or UNIX file to hold the backup. - When \ :emphasis:`operation=restore`\ , the destination data set or UNIX file backup to restore. + When *operation=restore*, the destination data set or UNIX file backup to restore. - There are no enforced conventions for backup names. However, using a common extension like \ :literal:`.dzp`\ for UNIX files and \ :literal:`.DZP`\ for data sets will improve readability. + There are no enforced conventions for backup names. However, using a common extension like ``.dzp`` for UNIX files and ``.DZP`` for data sets will improve readability. | **required**: True | **type**: str @@ -135,9 +135,9 @@ recover overwrite - When \ :emphasis:`operation=backup`\ , specifies if an existing data set or UNIX file matching \ :emphasis:`backup\_name`\ should be deleted. + When *operation=backup*, specifies if an existing data set or UNIX file matching *backup_name* should be deleted. - When \ :emphasis:`operation=restore`\ , specifies if the module should overwrite existing data sets with matching name on the target device. + When *operation=restore*, specifies if the module should overwrite existing data sets with matching name on the target device. | **required**: False | **type**: bool @@ -145,35 +145,35 @@ overwrite sms_storage_class - When \ :emphasis:`operation=restore`\ , specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. + When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. - When \ :emphasis:`operation=backup`\ , specifies the storage class to use for temporary data sets created during backup process. 
+ When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. - If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str sms_management_class - When \ :emphasis:`operation=restore`\ , specifies the management class to use. The management class will also be used for temporary data sets created during restore process. + When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. - When \ :emphasis:`operation=backup`\ , specifies the management class to use for temporary data sets created during backup process. + When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. - If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str space - If \ :emphasis:`operation=backup`\ , specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. + If *operation=backup*, specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. 
- If \ :emphasis:`operation=restore`\ , specifies the amount of space to allocate for data sets temporarily created during the restore process. + If *operation=restore*, specifies the amount of space to allocate for data sets temporarily created during the restore process. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. - When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space`\ defaults to \ :literal:`1`\ , otherwise default is \ :literal:`25`\ + When *full_volume=True*, *space* defaults to ``1``, otherwise default is ``25`` | **required**: False | **type**: int @@ -182,9 +182,9 @@ space space_type The unit of measurement to use when defining data set space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. - When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space\_type`\ defaults to \ :literal:`g`\ , otherwise default is \ :literal:`m`\ + When *full_volume=True*, *space_type* defaults to ``g``, otherwise default is ``m`` | **required**: False | **type**: str @@ -203,7 +203,7 @@ hlq tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup data sets. - The default HLQ is the Ansible user that executes the module and if that is not available, then the value of \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. | **required**: False | **type**: str diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 8cd6f756c..f3eef5967 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -38,9 +38,9 @@ src state - Whether the block should be inserted or replaced using \ :emphasis:`state=present`\ . 
+ Whether the block should be inserted or replaced using *state=present*. - Whether the block should be removed using \ :emphasis:`state=absent`\ . + Whether the block should be removed using *state=absent*. | **required**: False | **type**: str @@ -51,9 +51,9 @@ state marker The marker line template. - \ :literal:`{mark}`\ will be replaced with the values \ :literal:`in marker\_begin`\ (default="BEGIN") and \ :literal:`marker\_end`\ (default="END"). + ``{mark}`` will be replaced with the values ``in marker_begin`` (default="BEGIN") and ``marker_end`` (default="END"). - Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. | **required**: False | **type**: str @@ -63,7 +63,7 @@ marker block The text to insert inside the marker lines. - Multi-line can be separated by '\\n'. + Multi-line can be separated by '\n'. Any double-quotation marks will be removed. @@ -74,11 +74,11 @@ block insertafter If specified, the block will be inserted after the last match of the specified regular expression. - A special value \ :literal:`EOF`\ for inserting a block at the end of the file is available. + A special value ``EOF`` for inserting a block at the end of the file is available. - If a specified regular expression has no matches, \ :literal:`EOF`\ will be used instead. + If a specified regular expression has no matches, ``EOF`` will be used instead. - Choices are EOF or '\*regex\*'. + Choices are EOF or '*regex*'. Default is EOF. @@ -89,18 +89,18 @@ insertafter insertbefore If specified, the block will be inserted before the last match of specified regular expression. - A special value \ :literal:`BOF`\ for inserting the block at the beginning of the file is available. 
+ A special value ``BOF`` for inserting the block at the beginning of the file is available. If a specified regular expression has no matches, the block will be inserted at the end of the file. - Choices are BOF or '\*regex\*'. + Choices are BOF or '*regex*'. | **required**: False | **type**: str marker_begin - This will be inserted at \ :literal:`{mark}`\ in the opening ansible block marker. + This will be inserted at ``{mark}`` in the opening ansible block marker. | **required**: False | **type**: str @@ -108,7 +108,7 @@ marker_begin marker_end - This will be inserted at \ :literal:`{mark}`\ in the closing ansible block marker. + This will be inserted at ``{mark}`` in the closing ansible block marker. | **required**: False | **type**: str @@ -116,9 +116,9 @@ marker_end backup - Specifies whether a backup of destination should be created before editing the source \ :emphasis:`src`\ . + Specifies whether a backup of destination should be created before editing the source *src*. - When set to \ :literal:`true`\ , the module creates a backup file or data set. + When set to ``true``, the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -130,15 +130,15 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *src* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup\_name name must be an MVS data set name, and the dataset must not be preallocated. + If the source is an MVS data set, the backup_name name must be an MVS data set name, and the dataset must not be preallocated. 
- If the backup\_name is not provided, the default backup\_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. - If \ :emphasis:`src`\ is a data set member and backup\_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If *src* is a data set member and backup_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str @@ -147,14 +147,14 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str encoding - The character set of the source \ :emphasis:`src`\ . \ `zos\_blockinfile <./zos_blockinfile.html>`__\ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source *src*. 
`zos_blockinfile <./zos_blockinfile.html>`_ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -168,7 +168,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. | **required**: False | **type**: bool @@ -290,13 +290,13 @@ Notes .. note:: It is the playbook author or user's responsibility to avoid files that should not be encoded, such as binary files. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. - All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. + All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. 
- When using \`\`with\_\*\`\` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration.
+ When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration.

- When more then one block should be handled in a file you must change the \ :emphasis:`marker`\ per task.
+ When more than one block should be handled in a file you must change the *marker* per task.



@@ -315,7 +315,7 @@ Return Values


changed
- Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`.
+ Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`.

 | **returned**: success
 | **type**: bool
diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst
index 5ea5bf3ef..453f07302 100644
--- a/docs/source/modules/zos_copy.rst
+++ b/docs/source/modules/zos_copy.rst
@@ -16,7 +16,7 @@ zos_copy -- Copy data to z/OS
 Synopsis
 --------
-- The \ `zos\_copy <./zos_copy.html>`__\ module copies a file or data set from a local or a remote machine to a location on the remote machine.
+- The `zos_copy <./zos_copy.html>`_ module copies a file or data set from a local or a remote machine to a location on the remote machine.



@@ -27,17 +27,17 @@ Parameters


asa_text
- If set to \ :literal:`true`\ , indicates that either \ :literal:`src`\ or \ :literal:`dest`\ or both contain ASA control characters.
+ If set to ``true``, indicates that either ``src`` or ``dest`` or both contain ASA control characters.

- When \ :literal:`src`\ is a USS file and \ :literal:`dest`\ is a data set, the copy will preserve ASA control characters in the destination.
+ When ``src`` is a USS file and ``dest`` is a data set, the copy will preserve ASA control characters in the destination.

- When \ :literal:`src`\ is a data set containing ASA control characters and \ :literal:`dest`\ is a USS file, the copy will put all control characters as plain text in the destination. 
+ When ``src`` is a data set containing ASA control characters and ``dest`` is a USS file, the copy will put all control characters as plain text in the destination. - If \ :literal:`dest`\ is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). + If ``dest`` is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). - If neither \ :literal:`src`\ or \ :literal:`dest`\ have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. + If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If \ :literal:`is\_binary`\ is \ :literal:`true`\ or \ :literal:`executable`\ is \ :literal:`true`\ as well, the module will fail. + This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. | **required**: False | **type**: bool @@ -47,7 +47,7 @@ asa_text backup Specifies whether a backup of the destination should be created before copying data. - When set to \ :literal:`true`\ , the module creates a backup file or data set. + When set to ``true``, the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -59,24 +59,24 @@ backup backup_name Specify a unique USS file name or data set name for the destination backup. - If the destination \ :literal:`dest`\ is a USS file or path, the \ :literal:`backup\_name`\ must be an absolute path name. + If the destination ``dest`` is a USS file or path, the ``backup_name`` must be an absolute path name. 
- If the destination is an MVS data set name, the \ :literal:`backup\_name`\ provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. + If the destination is an MVS data set name, the ``backup_name`` provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. - If the \ :literal:`backup\_name`\ is not provided, the default \ :literal:`backup\_name`\ will be used. If the \ :literal:`dest`\ is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the \ :literal:`dest`\ is an MVS data set, it will be a data set with a randomly generated name. + If the ``backup_name`` is not provided, the default ``backup_name`` will be used. If the ``dest`` is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the ``dest`` is an MVS data set, it will be a data set with a randomly generated name. - If \ :literal:`dest`\ is a data set member and \ :literal:`backup\_name`\ is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If ``dest`` is a data set member and ``backup_name`` is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str content - When used instead of \ :literal:`src`\ , sets the contents of a file or data set directly to the specified value. + When used instead of ``src``, sets the contents of a file or data set directly to the specified value. - Works only when \ :literal:`dest`\ is a USS file, sequential data set, or a partitioned data set member. 
+ Works only when ``dest`` is a USS file, sequential data set, or a partitioned data set member. - If \ :literal:`dest`\ is a directory, then content will be copied to \ :literal:`/path/to/dest/inline\_copy`\ . + If ``dest`` is a directory, then content will be copied to ``/path/to/dest/inline_copy``. | **required**: False | **type**: str @@ -85,27 +85,27 @@ content dest The remote absolute path or data set where the content should be copied to. - \ :literal:`dest`\ can be a USS file, directory or MVS data set name. + ``dest`` can be a USS file, directory or MVS data set name. - If \ :literal:`dest`\ has missing parent directories, they will be created. + If ``dest`` has missing parent directories, they will be created. - If \ :literal:`dest`\ is a nonexistent USS file, it will be created. + If ``dest`` is a nonexistent USS file, it will be created. - If \ :literal:`dest`\ is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail. + If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail. - If \ :literal:`dest`\ is a nonexistent data set, it will be created following the process outlined here and in the \ :literal:`volume`\ option. + If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. - If \ :literal:`dest`\ is a nonexistent data set, the attributes assigned will depend on the type of \ :literal:`src`\ . 
If \ :literal:`src`\ is a USS file, \ :literal:`dest`\ will have a Fixed Block (FB) record format and the remaining attributes will be computed. If \ :emphasis:`is\_binary=true`\ , \ :literal:`dest`\ will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If \ :emphasis:`executable=true`\ ,\ :literal:`dest`\ will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*, ``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. - When \ :literal:`dest`\ is a data set, precedence rules apply. If \ :literal:`dest\_data\_set`\ is set, this will take precedence over an existing data set. If \ :literal:`dest`\ is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, \ :literal:`dest`\ will be created with the same attributes of \ :literal:`src`\ . + When ``dest`` is a data set, precedence rules apply. If ``dest_data_set`` is set, this will take precedence over an existing data set. If ``dest`` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precedent rule has been exercised, ``dest`` will be created with the same attributes of ``src``.
- When the \ :literal:`dest`\ is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) \ :literal:`dest`\ will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + When the ``dest`` is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) ``dest`` will be deleted and recreated following the process outlined in the ``volume`` option. - When the \ :literal:`dest`\ is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + When the ``dest`` is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the ``volume`` option. - When \ :literal:`dest`\ is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + When ``dest`` is an existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the ``volume`` option. - When \ :literal:`dest`\ is a data set, you can override storage management rules by specifying \ :literal:`volume`\ if the storage class being used has GUARANTEED\_SPACE=YES specified, otherwise, the allocation will fail. See \ :literal:`volume`\ for more volume related processes. + When ``dest`` is a data set, you can override storage management rules by specifying ``volume`` if the storage class being used has GUARANTEED_SPACE=YES specified, otherwise, the allocation will fail. See ``volume`` for more volume related processes. | **required**: True | **type**: str @@ -114,9 +114,9 @@ dest encoding Specifies which encodings the destination file or data set should be converted from and to.
- If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. + If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - Only valid if \ :literal:`is\_binary`\ is false. + Only valid if ``is_binary`` is false. | **required**: False | **type**: dict @@ -140,22 +140,22 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str force - If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is empty, the \ :literal:`dest`\ will be reused. + If set to ``true`` and the remote file or data set ``dest`` is empty, the ``dest`` will be reused. - If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is NOT empty, the \ :literal:`dest`\ will be deleted and recreated with the \ :literal:`src`\ data set attributes, otherwise it will be recreated with the \ :literal:`dest`\ data set attributes. + If set to ``true`` and the remote file or data set ``dest`` is NOT empty, the ``dest`` will be deleted and recreated with the ``src`` data set attributes, otherwise it will be recreated with the ``dest`` data set attributes. - To backup data before any deletion, see parameters \ :literal:`backup`\ and \ :literal:`backup\_name`\ . + To backup data before any deletion, see parameters ``backup`` and ``backup_name``. - If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. 
+ If set to ``false``, the file or data set will only be copied if the destination does not exist. - If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. + If set to ``false`` and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -163,11 +163,11 @@ force force_lock - By default, when \ :literal:`dest`\ is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use \ :literal:`force\_lock`\ to bypass this check and continue with copy. + By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. - If set to \ :literal:`true`\ and destination is a MVS data set opened by another process then zos\_copy will try to copy using DISP=SHR. + If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. - Using \ :literal:`force\_lock`\ uses operations that are subject to race conditions and can lead to data loss, use with caution. + Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. @@ -177,9 +177,9 @@ force_lock ignore_sftp_stderr - During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. + During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. 
The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. | **required**: False | **type**: bool @@ -187,11 +187,11 @@ ignore_sftp_stderr is_binary - If set to \ :literal:`true`\ , indicates that the file or data set to be copied is a binary file or data set. + If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. - When \ :emphasis:`is\_binary=true`\ , no encoding conversion is applied to the content, all content transferred retains the original state. + When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. - Use \ :emphasis:`is\_binary=true`\ when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. + Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -199,15 +199,15 @@ is_binary executable - If set to \ :literal:`true`\ , indicates that the file or library to be copied is an executable. + If set to ``true``, indicates that the file or library to be copied is an executable. - If the \ :literal:`src`\ executable has an alias, the alias information is also copied. 
If the \ :literal:`dest`\ is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. + If the ``src`` executable has an alias, the alias information is also copied. If the ``dest`` is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. - If \ :emphasis:`executable=true`\ , and \ :literal:`dest`\ is a data set, it must be a PDS or PDSE (library). + If *executable=true*, and ``dest`` is a data set, it must be a PDS or PDSE (library). - If \ :literal:`dest`\ is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. - If \ :literal:`dest`\ is a file, execute permission for the user will be added to the file (\`\`u+x\`\`). + If ``dest`` is a file, execute permission for the user will be added to the file (``u+x``). | **required**: False | **type**: bool @@ -215,9 +215,9 @@ executable aliases - If set to \ :literal:`true`\ , indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + If set to ``true``, indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. - Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when \ :literal:`executable=True`\ and \ :literal:`dest`\ is a USS file or directory, this option will be ignored. + Aliases are implicitly preserved when libraries are copied over to USS destinations. 
That is, when ``executable=True`` and ``dest`` is a USS file or directory, this option will be ignored. Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. @@ -239,7 +239,7 @@ group When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str @@ -248,13 +248,13 @@ group mode The permission of the destination file or directory. - If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``) or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`.
+ The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. - \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. + *mode=preserve* means that the file will be given the same permissions as the source file. | **required**: False | **type**: str @@ -265,16 +265,16 @@ owner When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str remote_src - If set to \ :literal:`false`\ , the module searches for \ :literal:`src`\ at the local machine. + If set to ``false``, the module searches for ``src`` at the local machine. - If set to \ :literal:`true`\ , the module goes to the remote/target machine for \ :literal:`src`\ . + If set to ``true``, the module goes to the remote/target machine for ``src``. | **required**: False | **type**: bool @@ -284,23 +284,23 @@ remote_src src Path to a file/directory or name of a data set to copy to remote z/OS system. - If \ :literal:`remote\_src`\ is true, then \ :literal:`src`\ must be the path to a Unix System Services (USS) file, name of a data set, or data set member. + If ``remote_src`` is true, then ``src`` must be the path to a Unix System Services (USS) file, name of a data set, or data set member. - If \ :literal:`src`\ is a local path or a USS path, it can be absolute or relative. + If ``src`` is a local path or a USS path, it can be absolute or relative. - If \ :literal:`src`\ is a directory, \ :literal:`dest`\ must be a partitioned data set or a USS directory. + If ``src`` is a directory, ``dest`` must be a partitioned data set or a USS directory. 
- If \ :literal:`src`\ is a file and \ :literal:`dest`\ ends with "/" or is a directory, the file is copied to the directory with the same filename as \ :literal:`src`\ . + If ``src`` is a file and ``dest`` ends with "/" or is a directory, the file is copied to the directory with the same filename as ``src``. - If \ :literal:`src`\ is a directory and ends with "/", the contents of it will be copied into the root of \ :literal:`dest`\ . If it doesn't end with "/", the directory itself will be copied. + If ``src`` is a directory and ends with "/", the contents of it will be copied into the root of ``dest``. If it doesn't end with "/", the directory itself will be copied. - If \ :literal:`src`\ is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. + If ``src`` is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. - If \ :literal:`src`\ is a VSAM data set, \ :literal:`dest`\ must also be a VSAM. + If ``src`` is a VSAM data set, ``dest`` must also be a VSAM. Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. - Required unless using \ :literal:`content`\ . + Required unless using ``content``. | **required**: False | **type**: str @@ -317,22 +317,22 @@ validate volume - If \ :literal:`dest`\ does not exist, specify which volume \ :literal:`dest`\ should be allocated to. + If ``dest`` does not exist, specify which volume ``dest`` should be allocated to. Only valid when the destination is an MVS data set. The volume must already be present on the device. - If no volume is specified, storage management rules will be used to determine the volume where \ :literal:`dest`\ will be allocated. + If no volume is specified, storage management rules will be used to determine the volume where ``dest`` will be allocated. 
- If the storage administrator has specified a system default unit name and you do not set a \ :literal:`volume`\ name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. + If the storage administrator has specified a system default unit name and you do not set a ``volume`` name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. | **required**: False | **type**: str dest_data_set - Data set attributes to customize a \ :literal:`dest`\ data set to be copied into. + Data set attributes to customize a ``dest`` data set to be copied into. | **required**: False | **type**: dict @@ -347,18 +347,18 @@ dest_data_set space_primary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int space_secondary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -367,7 +367,7 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. 
- Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -375,7 +375,7 @@ dest_data_set record_format - If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) + If the destination data set does not exist, this sets the format of the data set. (e.g ``fb``) Choices are case-sensitive. @@ -412,9 +412,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -423,9 +423,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -472,13 +472,13 @@ dest_data_set use_template - Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when \ :literal:`src`\ is a local file or directory. + Only valid when ``src`` is a local file or directory. 
- All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ | **required**: False | **type**: bool @@ -488,9 +488,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. - These options are ignored unless \ :literal:`use\_template`\ is true. + These options are ignored unless ``use_template`` is true. 
| **required**: False | **type**: dict @@ -569,7 +569,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -803,17 +803,17 @@ Notes .. note:: Destination data sets are assumed to be in catalog. When trying to copy to an uncataloged data set, the module assumes that the data set does not exist and will create it. - Destination will be backed up if either \ :literal:`backup`\ is \ :literal:`true`\ or \ :literal:`backup\_name`\ is provided. If \ :literal:`backup`\ is \ :literal:`false`\ but \ :literal:`backup\_name`\ is provided, task will fail. + Destination will be backed up if either ``backup`` is ``true`` or ``backup_name`` is provided. If ``backup`` is ``false`` but ``backup_name`` is provided, task will fail. When copying local files or directories, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file or directory being copied. Temporary files will always be deleted, regardless of success or failure of the copy task. VSAM data sets can only be copied to other VSAM data sets. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. 
This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos\_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option \ :literal:`executable`\ that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos\_copy.html) error. + Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. @@ -920,7 +920,7 @@ destination_attributes checksum - SHA256 checksum of the file after running zos\_copy. + SHA256 checksum of the file after running zos_copy. | **returned**: When ``validate=true`` and if ``dest`` is USS | **type**: str diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 3300c7d40..668b367bf 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -28,11 +28,11 @@ Parameters name - The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) + The name of the data set being managed. (e.g ``USER.TEST``) - If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. 
+ If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ and not using \ :emphasis:`batch`\ . + Required if *type=member* or *state!=present* and not using *batch*. | **required**: False | **type**: str @@ -41,49 +41,49 @@ name state The final state desired for specified data set. - If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. + If *state=absent* and *type=member* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . 
+ If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. 
- If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ .
+ If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*.

- If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ .
+ If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*.

- If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ .
+ If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*.

- If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable.
+ If *state=present* and *type=member* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there are executables (program objects) and data. The member created is formatted to store data, not an executable.
- If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . 
+ If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. | **required**: False @@ -93,9 +93,9 @@ state type - The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ). + The data set type to be used when creating a data set. (e.g ``pdse``). - \ :literal:`member`\ expects to be used with an existing partitioned data set. + ``member`` expects to be used with an existing partitioned data set. Choices are case-sensitive. @@ -108,7 +108,7 @@ type space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -118,7 +118,7 @@ space_primary space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -128,7 +128,7 @@ space_secondary space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -137,11 +137,11 @@ space_type record_format - The format of the data set. (e.g \ :literal:`FB`\ ) + The format of the data set. (e.g ``FB``) Choices are case-sensitive. - When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . + When *type=ksds*, *type=esds*, *type=rrds*, *type=lds* or *type=zfs* then *record_format=None*, these types do not have a default *record_format*. 
| **required**: False | **type**: str @@ -216,9 +216,9 @@ directory_blocks key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -227,28 +227,28 @@ key_offset key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int volumes - If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. + If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. - If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. + If creating a data set, *volumes* specifies the volume(s) where the data set should be created. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. 
- If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . + *volumes* is required when *state=cataloged*. Accepts a string when using a single volume and a list of strings when using multiple. @@ -257,12 +257,12 @@ volumes replace - When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. + When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. + If *replace=True*, all data in the original data set will be lost. | **required**: False | **type**: bool @@ -272,7 +272,7 @@ replace tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. 
| **required**: False | **type**: str @@ -283,9 +283,9 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. - The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . + The *force=True* only applies to data set members when *state=absent* and *type=member*. | **required**: False | **type**: bool @@ -301,11 +301,11 @@ batch name - The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) + The name of the data set being managed. (e.g ``USER.TEST``) - If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ + Required if *type=member* or *state!=present* | **required**: False | **type**: str @@ -314,49 +314,49 @@ batch state The final state desired for specified data set. - If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. 
- If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. + If *state=absent* and *type=member* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. 
- If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ .
+ If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*.

- If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ .
+ If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*.

- If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ .
+ If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*.
- If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If *state=present* and *type=member* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. 
If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. | **required**: False @@ -366,9 +366,9 @@ batch type - The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ) + The data set type to be used when creating a data set. (e.g ``pdse``) - \ :literal:`member`\ expects to be used with an existing partitioned data set. + ``member`` expects to be used with an existing partitioned data set. Choices are case-sensitive. @@ -381,7 +381,7 @@ batch space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -391,7 +391,7 @@ batch space_secondary The amount of secondary space to allocate for the dataset. 
- The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -401,7 +401,7 @@ batch space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -410,11 +410,11 @@ batch record_format - The format of the data set. (e.g \ :literal:`FB`\ ) + The format of the data set. (e.g ``FB``) Choices are case-sensitive. - When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . + When *type=ksds*, *type=esds*, *type=rrds*, *type=lds* or *type=zfs* then *record_format=None*, these types do not have a default *record_format*. | **required**: False | **type**: str @@ -489,9 +489,9 @@ batch key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -500,28 +500,28 @@ batch key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int volumes - If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. 
+ If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. - If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. + If creating a data set, *volumes* specifies the volume(s) where the data set should be created. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . + *volumes* is required when *state=cataloged*. Accepts a string when using a single volume and a list of strings when using multiple. @@ -530,12 +530,12 @@ batch replace - When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. + When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. 
Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. + If *replace=True*, all data in the original data set will be lost. | **required**: False | **type**: bool @@ -547,9 +547,9 @@ batch This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. - The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . + The *force=True* only applies to data set members when *state=absent* and *type=member*. | **required**: False | **type**: bool diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 68089a3a6..4c2294e24 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -37,7 +37,7 @@ encoding from - The character set of the source \ :emphasis:`src`\ . + The character set of the source *src*. | **required**: False | **type**: str @@ -45,7 +45,7 @@ encoding to - The destination \ :emphasis:`dest`\ character set for the output to be written as. + The destination *dest* character set for the output to be written as. | **required**: False | **type**: str @@ -58,7 +58,7 @@ src The USS path or file must be an absolute pathname. - If \ :emphasis:`src`\ is a USS directory, all files will be encoded. + If *src* is a USS directory, all files will be encoded. 
| **required**: True | **type**: str @@ -67,11 +67,11 @@ src dest The location where the converted characters are output. - The destination \ :emphasis:`dest`\ can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). + The destination *dest* can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). - If the length of the PDSE member name used in \ :emphasis:`dest`\ is greater than 8 characters, the member name will be truncated when written out. + If the length of the PDSE member name used in *dest* is greater than 8 characters, the member name will be truncated when written out. - If \ :emphasis:`dest`\ is not specified, the \ :emphasis:`src`\ will be used as the destination and will overwrite the \ :emphasis:`src`\ with the character set in the option \ :emphasis:`to\_encoding`\ . + If *dest* is not specified, the *src* will be used as the destination and will overwrite the *src* with the character set in the option *to_encoding*. The USS file or path must be an absolute pathname. @@ -80,9 +80,9 @@ dest backup - Creates a backup file or backup data set for \ :emphasis:`dest`\ , including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for *dest*, including the timestamp information to ensure that you retrieve the original file. - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. | **required**: False | **type**: bool @@ -92,13 +92,13 @@ backup backup_name Specify the USS file name or data set name for the dest backup. - If dest is a USS file or path, \ :emphasis:`backup\_name`\ must be a file or path name, and the USS path or file must be an absolute pathname. 
+ If dest is a USS file or path, *backup_name* must be a file or path name, and the USS path or file must be an absolute pathname. - If dest is an MVS data set, the \ :emphasis:`backup\_name`\ must be an MVS data set name. + If dest is an MVS data set, the *backup_name* must be an MVS data set name. - If \ :emphasis:`backup\_name`\ is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file\_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. + If *backup_name* is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. - \ :literal:`backup\_name`\ will be returned on either success or failure of module execution such that data can be retrieved. + ``backup_name`` will be returned on either success or failure of module execution such that data can be retrieved. | **required**: False | **type**: str @@ -107,7 +107,7 @@ backup_name backup_compress Determines if backups to USS files or paths should be compressed. - \ :emphasis:`backup\_compress`\ is only used when \ :emphasis:`backup=true`\ . + *backup_compress* is only used when *backup=true*. | **required**: False | **type**: bool @@ -117,7 +117,7 @@ backup_compress tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. 
+ The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -265,7 +265,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. @@ -278,7 +278,7 @@ Return Values src - The location of the input characters identified in option \ :emphasis:`src`\ . + The location of the input characters identified in option *src*. | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 7cdcabbd5..87a50a65a 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -20,7 +20,7 @@ Synopsis - When fetching a sequential data set, the destination file name will be the same as the data set name. - When fetching a PDS or PDSE, the destination will be a directory with the same name as the PDS or PDSE. - When fetching a PDS/PDSE member, destination will be a file. -- Files that already exist at \ :literal:`dest`\ will be overwritten if they are different than \ :literal:`src`\ . +- Files that already exist at ``dest`` will be overwritten if they are different than ``src``. @@ -96,7 +96,7 @@ encoding from - The character set of the source \ :emphasis:`src`\ . + The character set of the source *src*. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. 
@@ -105,7 +105,7 @@ encoding to - The destination \ :emphasis:`dest`\ character set for the output to be written as. + The destination *dest* character set for the output to be written as. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -117,16 +117,16 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str ignore_sftp_stderr - During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. + During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. | **required**: False | **type**: bool @@ -196,13 +196,13 @@ Notes .. 
note:: When fetching PDSE and VSAM data sets, temporary storage will be used on the remote z/OS system. After the PDSE or VSAM data set is successfully transferred, the temporary storage will be deleted. The size of the temporary storage will correspond to the size of PDSE or VSAM data set being fetched. If module execution fails, the temporary storage will be deleted. - To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the \ :literal:`checksum`\ parameter. + To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the ``checksum`` parameter. All data sets are always assumed to be cataloged. If an uncataloged data set needs to be fetched, it should be cataloged first. Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -263,7 +263,7 @@ data_set_type | **sample**: PDSE note - Notice of module failure when \ :literal:`fail\_on\_missing`\ is false. + Notice of module failure when ``fail_on_missing`` is false. 
| **returned**: failure and fail_on_missing=false | **type**: str diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index 83082b5c0..f195b2c2c 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -18,7 +18,7 @@ Synopsis -------- - Return a list of data sets based on specific criteria. - Multiple criteria can be added (AND'd) together. -- The \ :literal:`zos\_find`\ module can only find MVS data sets. Use the \ `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`__\ module to find USS files. +- The ``zos_find`` module can only find MVS data sets. Use the `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`_ module to find USS files. @@ -44,9 +44,9 @@ age age_stamp Choose the age property against which to compare age. - \ :literal:`creation\_date`\ is the date the data set was created and \ :literal:`ref\_date`\ is the date the data set was last referenced. + ``creation_date`` is the date the data set was created and ``ref_date`` is the date the data set was last referenced. - \ :literal:`ref\_date`\ is only applicable to sequential and partitioned data sets. + ``ref_date`` is only applicable to sequential and partitioned data sets. | **required**: False | **type**: str @@ -80,7 +80,7 @@ patterns This parameter expects a list, which can be either comma separated or YAML. - If \ :literal:`pds\_patterns`\ is provided, \ :literal:`patterns`\ must be member patterns. + If ``pds_patterns`` is provided, ``patterns`` must be member patterns. When searching for members within a PDS/PDSE, pattern can be a regular expression. @@ -107,7 +107,7 @@ pds_patterns Required when searching for data set members. - Valid only for \ :literal:`nonvsam`\ resource types. Otherwise ignored. + Valid only for ``nonvsam`` resource types. Otherwise ignored. | **required**: False | **type**: list @@ -117,9 +117,9 @@ pds_patterns resource_type The type of resource to search. 
- \ :literal:`nonvsam`\ refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. + ``nonvsam`` refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. - \ :literal:`cluster`\ refers to a VSAM cluster. The \ :literal:`data`\ and \ :literal:`index`\ are the data and index components of a VSAM cluster. + ``cluster`` refers to a VSAM cluster. The ``data`` and ``index`` are the data and index components of a VSAM cluster. | **required**: False | **type**: str @@ -192,11 +192,11 @@ Notes ----- .. note:: - Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. + Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. - The \ `zos\_find <./zos_find.html>`__\ module currently does not support wildcards for high level qualifiers. For example, \ :literal:`SOME.\*.DATA.SET`\ is a valid pattern, but \ :literal:`\*.DATA.SET`\ is not. + The `zos_find <./zos_find.html>`_ module currently does not support wildcards for high level qualifiers. For example, ``SOME.*.DATA.SET`` is a valid pattern, but ``*.DATA.SET`` is not. - If a data set pattern is specified as \ :literal:`USER.\*`\ , the matching data sets will have two name segments such as \ :literal:`USER.ABC`\ , \ :literal:`USER.XYZ`\ etc. If a wildcard is specified as \ :literal:`USER.\*.ABC`\ , the matching data sets will have three name segments such as \ :literal:`USER.XYZ.ABC`\ , \ :literal:`USER.TEST.ABC`\ etc. + If a data set pattern is specified as ``USER.*``, the matching data sets will have two name segments such as ``USER.ABC``, ``USER.XYZ`` etc. 
If a wildcard is specified as ``USER.*.ABC``, the matching data sets will have three name segments such as ``USER.XYZ.ABC``, ``USER.TEST.ABC`` etc. The time taken to execute the module is proportional to the number of data sets present on the system and how large the data sets are. diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 02a56fd23..0247ffd96 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -17,8 +17,8 @@ zos_gather_facts -- Gather z/OS system facts. Synopsis -------- - Retrieve variables from target z/OS systems. -- Variables are added to the \ :emphasis:`ansible\_facts`\ dictionary, available to playbooks. -- Apply filters on the \ :emphasis:`gather\_subset`\ list to reduce the variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. +- Variables are added to the *ansible_facts* dictionary, available to playbooks. +- Apply filters on the *gather_subset* list to reduce the variables that are added to the *ansible_facts* dictionary. - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. @@ -32,7 +32,7 @@ Parameters gather_subset If specified, it will collect facts that come under the specified subset (eg. ipl will return ipl facts). Specifying subsets is recommended to reduce time in gathering facts when the facts needed are in a specific subset. - The following subsets are available \ :literal:`ipl`\ , \ :literal:`cpu`\ , \ :literal:`sys`\ , and \ :literal:`iodf`\ . Depending on the version of ZOAU, additional subsets may be available. + The following subsets are available ``ipl``, ``cpu``, ``sys``, and ``iodf``. Depending on the version of ZOAU, additional subsets may be available. | **required**: False | **type**: list @@ -41,13 +41,13 @@ gather_subset filter - Filter out facts from the \ :emphasis:`ansible\_facts`\ dictionary. 
+ Filter out facts from the *ansible_facts* dictionary. - Uses shell-style \ `fnmatch <https://docs.python.org/3/library/fnmatch.html>`__\ pattern matching to filter out the collected facts. + Uses shell-style `fnmatch <https://docs.python.org/3/library/fnmatch.html>`_ pattern matching to filter out the collected facts. - An empty list means 'no filter', same as providing '\*'. + An empty list means 'no filter', same as providing '*'. - Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. To restrict the facts that are collected, refer to the \ :emphasis:`gather\_subset`\ parameter. + Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the *ansible_facts* dictionary. To restrict the facts that are collected, refer to the *gather_subset* parameter. | **required**: False | **type**: list diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index 59e37aeb9..efea6ea2a 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -18,9 +18,9 @@ Synopsis -------- - Display the z/OS job output for a given criteria (Job id/Job name/owner) with/without a data definition name as a filter. - At least provide a job id/job name/owner. -- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC\*" or "\*". -- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP\*" or "\*". -- The owner can be specific such as "IBMUSER", or one that uses a pattern like "\*". +- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC*" or "*". +- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP*" or "*". 
+- The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". - If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. @@ -32,21 +32,21 @@ Parameters job_id - The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC\*") + The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC*") | **required**: False | **type**: str job_name - The name of the batch job. (e.g "TCPIP", "C\*") + The name of the batch job. (e.g "TCPIP", "C*") | **required**: False | **type**: str owner - The owner who ran the job. (e.g "IBMUSER", "\*") + The owner who ran the job. (e.g "IBMUSER", "*") | **required**: False | **type**: str @@ -97,7 +97,7 @@ Return Values jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. | **returned**: success | **type**: list @@ -416,7 +416,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index e4da71341..ea320dfc3 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -17,8 +17,8 @@ zos_job_query -- Query job status Synopsis -------- - List z/OS job(s) and the current status of the job(s). -- Uses job\_name to filter the jobs by the job name. -- Uses job\_id to filter the jobs by the job identifier. 
+- Uses job_name to filter the jobs by the job name. +- Uses job_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. @@ -35,9 +35,9 @@ job_name A job name can be up to 8 characters long. - The \ :emphasis:`job\_name`\ can contain include multiple wildcards. + The *job_name* can contain multiple wildcards. - The asterisk (\`\*\`) wildcard will match zero or more specified characters. + The asterisk (`*`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -56,13 +56,13 @@ owner job_id The job id that has been assigned to the job. - A job id must begin with \`STC\`, \`JOB\`, \`TSU\` and are followed by up to 5 digits. + A job id must begin with `STC`, `JOB`, `TSU` and is followed by up to 5 digits. - When a job id is greater than 99,999, the job id format will begin with \`S\`, \`J\`, \`T\` and are followed by 7 digits. + When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and is followed by 7 digits. - The \ :emphasis:`job\_id`\ can contain include multiple wildcards. + The *job_id* can contain multiple wildcards. - The asterisk (\`\*\`) wildcard will match zero or more specified characters. + The asterisk (`*`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -122,7 +122,7 @@ changed | **type**: bool jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. 
| **returned**: success | **type**: list @@ -211,7 +211,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 964ab8f4b..a6f55acf9 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -42,13 +42,13 @@ src location - The JCL location. Supported choices are \ :literal:`data\_set`\ , \ :literal:`uss`\ or \ :literal:`local`\ . + The JCL location. Supported choices are ``data_set``, ``uss`` or ``local``. - \ :literal:`data\_set`\ can be a PDS, PDSE, or sequential data set. + ``data_set`` can be a PDS, PDSE, or sequential data set. - \ :literal:`uss`\ means the JCL location is located in UNIX System Services (USS). + ``uss`` means the JCL location is located in UNIX System Services (USS). - \ :literal:`local`\ means locally to the ansible control node. + ``local`` means locally to the ansible control node. | **required**: False | **type**: str @@ -57,9 +57,9 @@ location wait_time_s - Option \ :emphasis:`wait\_time\_s`\ is the total time that module \ `zos\_job\_submit <./zos_job_submit.html>`__\ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. + Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - \ :emphasis:`wait\_time\_s`\ is measured in seconds and must be a value greater than 0 and less than 86400. + *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. 
| **required**: False | **type**: int @@ -86,9 +86,9 @@ return_output volume The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system. - When configured, the \ `zos\_job\_submit <./zos_job_submit.html>`__\ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. + When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for \ :emphasis:`location=uss`\ and \ :emphasis:`location=local`\ . + Ignored for *location=uss* and *location=local*. | **required**: False | **type**: str @@ -97,7 +97,7 @@ volume encoding Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - This option is only supported for when \ :emphasis:`location=local`\ . + This option is only supported for when *location=local*. If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding. @@ -129,13 +129,13 @@ encoding use_template - Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when \ :literal:`src`\ is a local file or directory. + Only valid when ``src`` is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. 
+ All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ | **required**: False | **type**: bool @@ -145,9 +145,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. - These options are ignored unless \ :literal:`use\_template`\ is true. + These options are ignored unless ``use_template`` is true. | **required**: False | **type**: dict @@ -226,7 +226,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. 
+ Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -318,9 +318,9 @@ Notes ----- .. note:: - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -333,7 +333,7 @@ Return Values jobs - List of jobs output. If no job status is found, this will return an empty ret\_code with msg\_txt explanation. + List of jobs output. If no job status is found, this will return an empty ret_code with msg_txt explanation. 
| **returned**: success | **type**: list @@ -680,25 +680,25 @@ jobs msg Job status resulting from the job submission. - Job status \`ABEND\` indicates the job ended abnormally. + Job status `ABEND` indicates the job ended abnormally. - Job status \`AC\` indicates the job is active, often a started task or job taking long. + Job status `AC` indicates the job is active, often a started task or job taking long. - Job status \`CAB\` indicates a converter abend. + Job status `CAB` indicates a converter abend. - Job status \`CANCELED\` indicates the job was canceled. + Job status `CANCELED` indicates the job was canceled. - Job status \`CNV\` indicates a converter error. + Job status `CNV` indicates a converter error. - Job status \`FLU\` indicates the job was flushed. + Job status `FLU` indicates the job was flushed. - Job status \`JCLERR\` or \`JCL ERROR\` indicates the JCL has an error. + Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. - Job status \`SEC\` or \`SEC ERROR\` indicates the job as encountered a security error. + Job status `SEC` or `SEC ERROR` indicates the job has encountered a security error. - Job status \`SYS\` indicates a system failure. + Job status `SYS` indicates a system failure. - Job status \`?\` indicates status can not be determined. + Job status `?` indicates status can not be determined. Jobs where status can not be determined will result in None (NULL). diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 983e5ca0b..f7005017e 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -40,13 +40,13 @@ src regexp The regular expression to look for in every line of the USS file or data set. - For \ :literal:`state=present`\ , the pattern to replace if found. Only the last line found will be replaced. 
- For \ :literal:`state=absent`\ , the pattern of the line(s) to remove. + For ``state=absent``, the pattern of the line(s) to remove. - If the regular expression is not matched, the line will be added to the USS file or data set in keeping with \ :literal:`insertbefore`\ or \ :literal:`insertafter`\ settings. + If the regular expression is not matched, the line will be added to the USS file or data set in keeping with ``insertbefore`` or ``insertafter`` settings. - When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by \ :literal:`line`\ to ensure idempotence. + When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by ``line`` to ensure idempotence. | **required**: False | **type**: str @@ -64,22 +64,22 @@ state line The line to insert/replace into the USS file or data set. - Required for \ :literal:`state=present`\ . + Required for ``state=present``. - If \ :literal:`backrefs`\ is set, may contain backreferences that will get expanded with the \ :literal:`regexp`\ capture groups if the regexp matches. + If ``backrefs`` is set, may contain backreferences that will get expanded with the ``regexp`` capture groups if the regexp matches. | **required**: False | **type**: str backrefs - Used with \ :literal:`state=present`\ . + Used with ``state=present``. - If set, \ :literal:`line`\ can contain backreferences (both positional and named) that will get populated if the \ :literal:`regexp`\ matches. + If set, ``line`` can contain backreferences (both positional and named) that will get populated if the ``regexp`` matches. - This parameter changes the operation of the module slightly; \ :literal:`insertbefore`\ and \ :literal:`insertafter`\ will be ignored, and if the \ :literal:`regexp`\ does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. 
+ This parameter changes the operation of the module slightly; ``insertbefore`` and ``insertafter`` will be ignored, and if the ``regexp`` does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. - If the \ :literal:`regexp`\ does match, the last matching line will be replaced by the expanded line parameter. + If the ``regexp`` does match, the last matching line will be replaced by the expanded line parameter. | **required**: False | **type**: bool @@ -87,23 +87,23 @@ backrefs insertafter - Used with \ :literal:`state=present`\ . + Used with ``state=present``. If specified, the line will be inserted after the last match of specified regular expression. If the first match is required, use(firstmatch=yes). - A special value is available; \ :literal:`EOF`\ for inserting the line at the end of the USS file or data set. + A special value is available; ``EOF`` for inserting the line at the end of the USS file or data set. If the specified regular expression has no matches, EOF will be used instead. - If \ :literal:`insertbefore`\ is set, default value \ :literal:`EOF`\ will be ignored. + If ``insertbefore`` is set, default value ``EOF`` will be ignored. - If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertafter`\ , \ :literal:`insertafter`\ is only honored if no match for \ :literal:`regexp`\ is found. + If regular expressions are passed to both ``regexp`` and ``insertafter``, ``insertafter`` is only honored if no match for ``regexp`` is found. - May not be used with \ :literal:`backrefs`\ or \ :literal:`insertbefore`\ . + May not be used with ``backrefs`` or ``insertbefore``. - Choices are EOF or '\*regex\*' + Choices are EOF or '*regex*' Default is EOF @@ -112,30 +112,30 @@ insertafter insertbefore - Used with \ :literal:`state=present`\ . + Used with ``state=present``. If specified, the line will be inserted before the last match of specified regular expression. 
- If the first match is required, use \ :literal:`firstmatch=yes`\ . + If the first match is required, use ``firstmatch=yes``. - A value is available; \ :literal:`BOF`\ for inserting the line at the beginning of the USS file or data set. + A value is available; ``BOF`` for inserting the line at the beginning of the USS file or data set. If the specified regular expression has no matches, the line will be inserted at the end of the USS file or data set. - If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertbefore`\ , \ :literal:`insertbefore`\ is only honored if no match for \ :literal:`regexp`\ is found. + If regular expressions are passed to both ``regexp`` and ``insertbefore``, ``insertbefore`` is only honored if no match for ``regexp`` is found. - May not be used with \ :literal:`backrefs`\ or \ :literal:`insertafter`\ . + May not be used with ``backrefs`` or ``insertafter``. - Choices are BOF or '\*regex\*' + Choices are BOF or '*regex*' | **required**: False | **type**: str backup - Creates a backup file or backup data set for \ :emphasis:`src`\ , including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for *src*, including the timestamp information to ensure that you retrieve the original file. - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -147,11 +147,11 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name must be a file or path name, and the USS file or path must be an absolute path name. 
+ If the source *src* is a USS file or path, the backup_name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -162,16 +162,16 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str firstmatch - Used with \ :literal:`insertafter`\ or \ :literal:`insertbefore`\ . + Used with ``insertafter`` or ``insertbefore``. - If set, \ :literal:`insertafter`\ and \ :literal:`insertbefore`\ will work with the first line that matches the given regular expression. + If set, ``insertafter`` and ``insertbefore`` will work with the first line that matches the given regular expression. | **required**: False | **type**: bool @@ -179,7 +179,7 @@ firstmatch encoding - The character set of the source \ :emphasis:`src`\ . 
\ `zos\_lineinfile <./zos_lineinfile.html>`__\ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source *src*. `zos_lineinfile <./zos_lineinfile.html>`_ must be provided with the correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -193,7 +193,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. | **required**: False | **type**: bool @@ -260,7 +260,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. @@ -273,7 +273,7 @@ Return Values changed - Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`. + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. 
| **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 9a30c5c91..cfe2f0ae1 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -16,9 +16,9 @@ zos_mount -- Mount a z/OS file system. Synopsis -------- -- The module \ `zos\_mount <./zos_mount.html>`__\ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. -- The \ :emphasis:`src`\ data set must be unique and a Fully Qualified Name (FQN). -- The \ :emphasis:`path`\ will be created if needed. +- The module `zos_mount <./zos_mount.html>`_ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. +- The *src* data set must be unique and a Fully Qualified Name (FQN). +- The *path* will be created if needed. @@ -31,7 +31,7 @@ Parameters path The absolute path name onto which the file system is to be mounted. - The \ :emphasis:`path`\ is case sensitive and must be less than or equal 1023 characters long. + The *path* is case sensitive and must be less than or equal 1023 characters long. | **required**: True | **type**: str @@ -40,9 +40,9 @@ path src The name of the file system to be added to the file system hierarchy. - The file system \ :emphasis:`src`\ must be a data set of type \ :emphasis:`fs\_type`\ . + The file system *src* must be a data set of type *fs_type*. - The file system \ :emphasis:`src`\ data set must be cataloged. + The file system *src* data set must be cataloged. | **required**: True | **type**: str @@ -53,7 +53,7 @@ fs_type The physical file systems data set format to perform the logical mount. - The \ :emphasis:`fs\_type`\ is required to be lowercase. + The *fs_type* is required to be lowercase. | **required**: True | **type**: str @@ -63,25 +63,25 @@ fs_type state The desired status of the described mount (choice). 
- If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are not in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will be updated, the device will be mounted and the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. - If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will not be updated, the device will not be mounted and the module will complete successfully with \ :emphasis:`changed=False`\ . + If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. - If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are in use, the module will \ :strong:`not`\ add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/data_store*. The device will be unmounted and the module will complete successfully with *changed=True*. 
- If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are not in use, the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ .The device will remain unchanged and the module will complete with \ :emphasis:`changed=False`\ . + If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will remain unchanged and the module will complete with *changed=False*. - If \ :emphasis:`state=present`\ , the module will add the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/data_store* if not present. The module will complete successfully with *changed=True*. - If \ :emphasis:`state=absent`\ , the module will remove the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if present. The module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=absent*, the module will remove the file system entry from the provided parmlib member *persistent/data_store* if present. The module will complete successfully with *changed=True*. - If \ :emphasis:`state=remounted`\ , the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and mounted, the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. 
| **required**: False @@ -91,7 +91,7 @@ state persistent - Add or remove mount command entries to provided \ :emphasis:`data\_store`\ + Add or remove mount command entries to provided *data_store* | **required**: False | **type**: dict @@ -105,9 +105,9 @@ persistent backup - Creates a backup file or backup data set for \ :emphasis:`data\_store`\ , including the timestamp information to ensure that you retrieve the original parameters defined in \ :emphasis:`data\_store`\ . + Creates a backup file or backup data set for *data_store*, including the timestamp information to ensure that you retrieve the original parameters defined in *data_store*. - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -119,11 +119,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`data\_store`\ is a USS file or path, the \ :emphasis:`backup\_name`\ name can be relative or absolute for file or path name. + If the source *data_store* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. - If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default \ :emphasis:`backup\_name`\ will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default *backup_name* will be used. 
If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -132,9 +132,9 @@ persistent comment - If provided, this is used as a comment that surrounds the command in the \ :emphasis:`persistent/data\_store`\ + If provided, this is used as a comment that surrounds the command in the *persistent/data_store* - Comments are used to encapsulate the \ :emphasis:`persistent/data\_store`\ entry such that they can easily be understood and located. + Comments are used to encapsulate the *persistent/data_store* entry such that they can easily be understood and located. | **required**: False | **type**: list @@ -145,7 +145,7 @@ persistent unmount_opts Describes how the unmount will be performed. - For more on coded character set identifiers, review the IBM documentation topic \ :strong:`UNMOUNT - Remove a file system from the file hierarchy`\ . + For more on coded character set identifiers, review the IBM documentation topic **UNMOUNT - Remove a file system from the file hierarchy**. | **required**: False | **type**: str @@ -156,13 +156,13 @@ unmount_opts mount_opts Options available to the mount. - If \ :emphasis:`mount\_opts=ro`\ on a mounted/remount, mount is performed read-only. + If *mount_opts=ro* on a mounted/remount, mount is performed read-only. - If \ :emphasis:`mount\_opts=same`\ and (unmount\_opts=remount), mount is opened in the same mode as previously opened. + If *mount_opts=same* and (unmount_opts=remount), mount is opened in the same mode as previously opened. - If \ :emphasis:`mount\_opts=nowait`\ , mount is performed asynchronously. + If *mount_opts=nowait*, mount is performed asynchronously. 
- If \ :emphasis:`mount\_opts=nosecurity`\ , security checks are not enforced for files in this file system. + If *mount_opts=nosecurity*, security checks are not enforced for files in this file system. | **required**: False | **type**: str @@ -184,11 +184,11 @@ tag_untagged When the file system is unmounted, the tags are lost. - If \ :emphasis:`tag\_untagged=notext`\ none of the untagged files in the file system are automatically converted during file reading and writing. + If *tag_untagged=notext* none of the untagged files in the file system are automatically converted during file reading and writing. - If \ :emphasis:`tag\_untagged=text`\ each untagged file is implicitly marked as containing pure text data that can be converted. + If *tag_untagged=text* each untagged file is implicitly marked as containing pure text data that can be converted. - If this flag is used, use of tag\_ccsid is encouraged. + If this flag is used, use of tag_ccsid is encouraged. | **required**: False | **type**: str @@ -198,13 +198,13 @@ tag_untagged tag_ccsid Identifies the coded character set identifier (ccsid) to be implicitly set for the untagged file. - For more on coded character set identifiers, review the IBM documentation topic \ :strong:`Coded Character Sets`\ . + For more on coded character set identifiers, review the IBM documentation topic **Coded Character Sets**. Specified as a decimal value from 0 to 65535. However, when TEXT is specified, the value must be between 0 and 65535. The value is not checked as being valid and the corresponding code page is not checked as being installed. - Required when \ :emphasis:`tag\_untagged=TEXT`\ . + Required when *tag_untagged=TEXT*. | **required**: False | **type**: int @@ -214,10 +214,10 @@ allow_uid Specifies whether the SETUID and SETGID mode bits on an executable in this file system are considered. Also determines whether the APF extended attribute or the Program Control extended attribute is honored. 
- If \ :emphasis:`allow\_uid=True`\ the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. + If *allow_uid=True* the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. - If \ :emphasis:`allow\_uid=False`\ the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. + If *allow_uid=False* the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. | **required**: False @@ -226,10 +226,10 @@ allow_uid sysname - For systems participating in shared file system, \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. This system will then become the owner of the file system mounted. This system must be IPLed with SYSPLEX(YES). + For systems participating in shared file system, *sysname* specifies the particular system on which a mount should be performed. This system will then become the owner of the file system mounted. This system must be IPLed with SYSPLEX(YES). - \ :emphasis:`sysname`\ is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. + *sysname* is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. | **required**: False @@ -240,13 +240,13 @@ automove These parameters apply only in a sysplex where systems are exploiting the shared file system capability. 
They specify what happens to the ownership of a file system when a shutdown, PFS termination, dead system takeover, or file system move occurs. The default setting is AUTOMOVE where the file system will be randomly moved to another system (no system list used). - \ :emphasis:`automove=automove`\ indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. + *automove=automove* indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. - \ :emphasis:`automove=noautomove`\ prevents movement of the file system's ownership in some situations. + *automove=noautomove* prevents movement of the file system's ownership in some situations. - \ :emphasis:`automove=unmount`\ allows the file system to be unmounted in some situations. + *automove=unmount* allows the file system to be unmounted in some situations. | **required**: False @@ -275,7 +275,7 @@ automove_list tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -389,7 +389,7 @@ Notes If an uncataloged data set needs to be fetched, it should be cataloged first. - Uncataloged data sets can be cataloged using the \ `zos\_data\_set <./zos_data_set.html>`__\ module. + Uncataloged data sets can be cataloged using the `zos_data_set <./zos_data_set.html>`_ module. @@ -467,7 +467,7 @@ persistent | **sample**: SYS1.FILESYS(PRMAABAK) comment - The text that was used in markers around the \ :emphasis:`Persistent/data\_store`\ entry. + The text that was used in markers around the *Persistent/data_store* entry. 
| **returned**: always | **type**: list @@ -529,7 +529,7 @@ allow_uid true sysname - \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. + *sysname* specifies the particular system on which a mount should be performed. | **returned**: if Non-None | **type**: str diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index c0551786e..500802fe2 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -45,9 +45,9 @@ parm auth Determines whether this program should run with authorized privileges. - If \ :emphasis:`auth=true`\ , the program runs as APF authorized. + If *auth=true*, the program runs as APF authorized. - If \ :emphasis:`auth=false`\ , the program runs as unauthorized. + If *auth=false*, the program runs as unauthorized. | **required**: False | **type**: bool @@ -57,7 +57,7 @@ auth verbose Determines if verbose output should be returned from the underlying utility used by this module. - When \ :emphasis:`verbose=true`\ verbose output is returned on module failure. + When *verbose=true* verbose output is returned on module failure. | **required**: False | **type**: bool @@ -67,19 +67,19 @@ verbose dds The input data source. - \ :emphasis:`dds`\ supports 6 types of sources + *dds* supports 6 types of sources - 1. \ :emphasis:`dd\_data\_set`\ for data set files. + 1. *dd_data_set* for data set files. - 2. \ :emphasis:`dd\_unix`\ for UNIX files. + 2. *dd_unix* for UNIX files. - 3. \ :emphasis:`dd\_input`\ for in-stream data set. + 3. *dd_input* for in-stream data set. - 4. \ :emphasis:`dd\_dummy`\ for no content input. + 4. *dd_dummy* for no content input. - 5. \ :emphasis:`dd\_concat`\ for a data set concatenation. + 5. *dd_concat* for a data set concatenation. - 6. \ :emphasis:`dds`\ supports any combination of source types. + 6. *dds* supports any combination of source types. 
| **required**: False | **type**: list @@ -89,7 +89,7 @@ dds dd_data_set Specify a data set. - \ :emphasis:`dd\_data\_set`\ can reference an existing data set or be used to define a new data set to be created during execution. + *dd_data_set* can reference an existing data set or be used to define a new data set to be created during execution. | **required**: False | **type**: dict @@ -110,7 +110,7 @@ dds type - The data set type. Only required when \ :emphasis:`disposition=new`\ . + The data set type. Only required when *disposition=new*. Maps to DSNTYPE on z/OS. @@ -120,7 +120,7 @@ dds disposition - \ :emphasis:`disposition`\ indicates the status of a data set. + *disposition* indicates the status of a data set. Defaults to shr. @@ -130,7 +130,7 @@ dds disposition_normal - \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after a normal termination of the program. + *disposition_normal* indicates what to do with the data set after a normal termination of the program. | **required**: False | **type**: str @@ -138,7 +138,7 @@ dds disposition_abnormal - \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after an abnormal termination of the program. + *disposition_abnormal* indicates what to do with the data set after an abnormal termination of the program. | **required**: False | **type**: str @@ -146,15 +146,15 @@ dds reuse - Determines if a data set should be reused if \ :emphasis:`disposition=new`\ and if a data set with a matching name already exists. + Determines if a data set should be reused if *disposition=new* and if a data set with a matching name already exists. - If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . + If *reuse=true*, *disposition* will be automatically switched to ``SHR``. - If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. 
+ If *reuse=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`replace`\ . + Mutually exclusive with *replace*. - \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ + *reuse* is only considered when *disposition=new* | **required**: False | **type**: bool @@ -162,17 +162,17 @@ dds replace - Determines if a data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with a matching name already exists. + Determines if a data set should be replaced if *disposition=new* and a data set with a matching name already exists. - If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. + If *replace=true*, the original data set will be deleted, and a new data set created. - If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. + If *replace=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`reuse`\ . + Mutually exclusive with *reuse*. - \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ + *replace* is only considered when *disposition=new* - \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. + *replace* will result in loss of all data in the original data set unless *backup* is specified. | **required**: False | **type**: bool @@ -180,9 +180,9 @@ dds backup - Determines if a backup should be made of an existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. + Determines if a backup should be made of an existing data set when *disposition=new*, *replace=true*, and a data set with the desired name is found. - \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . + *backup* is only used when *replace=true*. 
| **required**: False | **type**: bool @@ -190,7 +190,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . + The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. | **required**: False | **type**: str @@ -200,9 +200,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -211,9 +211,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -231,7 +231,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. + *sms_management_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -242,7 +242,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. + *sms_storage_class* is ignored if specified for an existing data set. 
All values must be between 1-8 alpha-numeric characters. @@ -253,7 +253,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. + *sms_data_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -264,7 +264,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -280,9 +280,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. + *key_label* is the public name of a protected encryption key in the ICSF key repository. - \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. + *key_label* should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -304,7 +304,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -313,9 +313,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD1 on z/OS. @@ -339,7 +339,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. 
@@ -348,9 +348,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD2 on z/OS. @@ -363,7 +363,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. + If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -376,14 +376,14 @@ dds The first byte of a logical record is position 0. - Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. + Provide *key_offset* only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g \ :literal:`80`\ ). + The logical record length. (e.g ``80``). For variable data sets, the length must include the 4-byte prefix area. @@ -417,11 +417,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -463,7 +463,7 @@ dds path The path to an existing UNIX file. 
- Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=OCREAT`\ .
+ Or provide the path to a newly created UNIX file when *status_group=OCREAT*.

The provided path must be absolute.

@@ -488,7 +488,7 @@ dds

mode
- The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ .
+ The file access attributes when the UNIX file is created specified in *path*.

Specify the mode as an octal number similarly to chmod.

@@ -499,47 +499,47 @@ dds

status_group
- The status for the UNIX file specified in \ :emphasis:`path`\ .
+ The status for the UNIX file specified in *path*.

- If you do not specify a value for the \ :emphasis:`status\_group`\ parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist.
+ If you do not specify a value for the *status_group* parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist.

Maps to PATHOPTS status group file options on z/OS.

You can specify up to 6 choices.

- \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file.
+ *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file.

- \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step.
+ *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. 
If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. - \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. + *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. - \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. - \ :emphasis:`ononblock`\ specifies the following, depending on the type of file + *ononblock* specifies the following, depending on the type of file For a FIFO special file - 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. + 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. - 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. 
+ 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. + 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. - \ :emphasis:`ononblock`\ has no effect on other file types. + *ononblock* has no effect on other file types. - \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . 
+ *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. - When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. + When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -548,7 +548,7 @@ dds access_group - The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . + The kind of access to request for the UNIX file specified in *path*. | **required**: False | **type**: str @@ -556,7 +556,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . + The type of data that is (or will be) stored in the file specified in *path*. Maps to FILEDATA on z/OS. @@ -569,7 +569,7 @@ dds block_size The block size, in bytes, for the UNIX file. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -578,7 +578,7 @@ dds record_length The logical record length for the UNIX file. - \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -589,7 +589,7 @@ dds record_format The record format for the UNIX file. 
- \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -608,11 +608,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -638,7 +638,7 @@ dds dd_input - \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. + *dd_input* is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -656,15 +656,15 @@ dds content The input contents for the DD. - \ :emphasis:`dd\_input`\ supports single or multiple lines of input. + *dd_input* supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. 
YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. - When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. 
| **required**: True | **type**: raw @@ -682,11 +682,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -696,7 +696,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. + for *dd_input*, *src_encoding* should generally not need to be changed. | **required**: False | **type**: str @@ -714,7 +714,7 @@ dds dd_output - Use \ :emphasis:`dd\_output`\ to specify - Content sent to the DD should be returned to the user. + Use *dd_output* to specify - Content sent to the DD should be returned to the user. | **required**: False | **type**: dict @@ -739,11 +739,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -753,7 +753,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. 
+ for *dd_input*, *src_encoding* should generally not need to be changed. | **required**: False | **type**: str @@ -771,9 +771,9 @@ dds dd_dummy - Use \ :emphasis:`dd\_dummy`\ to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. + Use *dd_dummy* to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. - \ :emphasis:`dd\_dummy`\ accepts no content input. + *dd_dummy* accepts no content input. | **required**: False | **type**: dict @@ -788,7 +788,7 @@ dds dd_vio - \ :emphasis:`dd\_vio`\ is used to handle temporary data sets. + *dd_vio* is used to handle temporary data sets. VIO data sets reside in the paging space; but, to the problem program and the access method, the data sets appear to reside on a direct access storage device. @@ -807,7 +807,7 @@ dds dd_concat - \ :emphasis:`dd\_concat`\ is used to specify a data set concatenation. + *dd_concat* is used to specify a data set concatenation. | **required**: False | **type**: dict @@ -821,7 +821,7 @@ dds dds - A list of DD statements, which can contain any of the following types: \ :emphasis:`dd\_data\_set`\ , \ :emphasis:`dd\_unix`\ , and \ :emphasis:`dd\_input`\ . + A list of DD statements, which can contain any of the following types: *dd_data_set*, *dd_unix*, and *dd_input*. | **required**: False | **type**: list @@ -831,7 +831,7 @@ dds dd_data_set Specify a data set. - \ :emphasis:`dd\_data\_set`\ can reference an existing data set. The data set referenced with \ :literal:`data\_set\_name`\ must be allocated before the module \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ is run, you can use \ `zos\_data\_set <./zos_data_set.html>`__\ to allocate a data set. + *dd_data_set* can reference an existing data set. 
The data set referenced with ``data_set_name`` must be allocated before the module `zos_mvs_raw <./zos_mvs_raw.html>`_ is run, you can use `zos_data_set <./zos_data_set.html>`_ to allocate a data set. | **required**: False | **type**: dict @@ -845,7 +845,7 @@ dds type - The data set type. Only required when \ :emphasis:`disposition=new`\ . + The data set type. Only required when *disposition=new*. Maps to DSNTYPE on z/OS. @@ -855,7 +855,7 @@ dds disposition - \ :emphasis:`disposition`\ indicates the status of a data set. + *disposition* indicates the status of a data set. Defaults to shr. @@ -865,7 +865,7 @@ dds disposition_normal - \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after normal termination of the program. + *disposition_normal* indicates what to do with the data set after normal termination of the program. | **required**: False | **type**: str @@ -873,7 +873,7 @@ dds disposition_abnormal - \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after abnormal termination of the program. + *disposition_abnormal* indicates what to do with the data set after abnormal termination of the program. | **required**: False | **type**: str @@ -881,15 +881,15 @@ dds reuse - Determines if data set should be reused if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. + Determines if data set should be reused if *disposition=new* and a data set with matching name already exists. - If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . + If *reuse=true*, *disposition* will be automatically switched to ``SHR``. - If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. + If *reuse=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`replace`\ . + Mutually exclusive with *replace*. 
- \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ + *reuse* is only considered when *disposition=new* | **required**: False | **type**: bool @@ -897,17 +897,17 @@ dds replace - Determines if data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. + Determines if data set should be replaced if *disposition=new* and a data set with matching name already exists. - If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. + If *replace=true*, the original data set will be deleted, and a new data set created. - If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. + If *replace=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`reuse`\ . + Mutually exclusive with *reuse*. - \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ + *replace* is only considered when *disposition=new* - \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. + *replace* will result in loss of all data in the original data set unless *backup* is specified. | **required**: False | **type**: bool @@ -915,9 +915,9 @@ dds backup - Determines if a backup should be made of existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. + Determines if a backup should be made of existing data set when *disposition=new*, *replace=true*, and a data set with the desired name is found. - \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . + *backup* is only used when *replace=true*. 
| **required**: False | **type**: bool @@ -925,7 +925,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . + The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. | **required**: False | **type**: str @@ -935,9 +935,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -946,9 +946,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -966,7 +966,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. + *sms_management_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -977,7 +977,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. + *sms_storage_class* is ignored if specified for an existing data set. 
All values must be between 1-8 alpha-numeric characters. @@ -988,7 +988,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. + *sms_data_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -999,7 +999,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -1015,9 +1015,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. + *key_label* is the public name of a protected encryption key in the ICSF key repository. - \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. + *key_label* should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -1039,7 +1039,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -1048,9 +1048,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD1 on z/OS. @@ -1074,7 +1074,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. 
@@ -1083,9 +1083,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD2 on z/OS. @@ -1098,7 +1098,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. + If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -1111,14 +1111,14 @@ dds The first byte of a logical record is position 0. - Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. + Provide *key_offset* only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g \ :literal:`80`\ ). + The logical record length. (e.g ``80``). For variable data sets, the length must include the 4-byte prefix area. @@ -1152,11 +1152,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -1191,7 +1191,7 @@ dds path The path to an existing UNIX file. 
- Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=ocreat`\ .
+ Or provide the path to a newly created UNIX file when *status_group=ocreat*.

The provided path must be absolute.

@@ -1216,7 +1216,7 @@ dds

mode
- The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ .
+ The file access attributes when the UNIX file is created specified in *path*.

Specify the mode as an octal number similar to chmod.

@@ -1227,47 +1227,47 @@ dds

status_group
- The status for the UNIX file specified in \ :emphasis:`path`\ .
+ The status for the UNIX file specified in *path*.

- If you do not specify a value for the \ :emphasis:`status\_group`\ parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist.
+ If you do not specify a value for the *status_group* parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist.

Maps to PATHOPTS status group file options on z/OS.

You can specify up to 6 choices.

- \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file.
+ *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file.

- \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step.
+ *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. 
If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. - \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. + *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. - \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. - \ :emphasis:`ononblock`\ specifies the following, depending on the type of file + *ononblock* specifies the following, depending on the type of file For a FIFO special file - 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. + 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. - 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. 
+ 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. + 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. - \ :emphasis:`ononblock`\ has no effect on other file types. + *ononblock* has no effect on other file types. - \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . 
+ *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. - When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. + When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -1276,7 +1276,7 @@ dds access_group - The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . + The kind of access to request for the UNIX file specified in *path*. | **required**: False | **type**: str @@ -1284,7 +1284,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . + The type of data that is (or will be) stored in the file specified in *path*. Maps to FILEDATA on z/OS. @@ -1297,7 +1297,7 @@ dds block_size The block size, in bytes, for the UNIX file. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -1306,7 +1306,7 @@ dds record_length The logical record length for the UNIX file. - \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -1317,7 +1317,7 @@ dds record_format The record format for the UNIX file. 
- \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -1336,11 +1336,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -1366,7 +1366,7 @@ dds dd_input - \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. + *dd_input* is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -1377,15 +1377,15 @@ dds content The input contents for the DD. - \ :emphasis:`dd\_input`\ supports single or multiple lines of input. + *dd_input* supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. 
YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. - When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. 
| **required**: True | **type**: raw @@ -1403,11 +1403,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -1417,7 +1417,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. + for *dd_input*, *src_encoding* should generally not need to be changed. | **required**: False | **type**: str @@ -1440,7 +1440,7 @@ dds tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -1756,11 +1756,11 @@ Notes ----- .. note:: - When executing programs using \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ , you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. + When executing programs using `zos_mvs_raw <./zos_mvs_raw.html>`_, you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. - 1. 
\ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. + 1. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. - 2. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + 2. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. 3. When executing a program, refer to the programs documentation as each programs requirments can vary fom DDs, instream-data indentation and continuation characters. @@ -1838,7 +1838,7 @@ backups | **type**: str backup_name - The name of the data set containing the backup of content from data set in original\_name. + The name of the data set containing the backup of content from data set in original_name. | **type**: str diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index ff1e5fe87..9ad26d64c 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -52,7 +52,7 @@ wait_time_s This option is helpful on a busy system requiring more time to execute commands. - Setting \ :emphasis:`wait`\ can instruct if execution should wait the full \ :emphasis:`wait\_time\_s`\ . + Setting *wait* can instruct if execution should wait the full *wait_time_s*. 
| **required**: False | **type**: int diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index a03a17fdc..b2e99d399 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -31,7 +31,7 @@ system If the system name is not specified, all outstanding messages for that system and for the local systems attached to it are returned. - A trailing asterisk, (\*) wildcard is supported. + A trailing asterisk, (*) wildcard is supported. | **required**: False | **type**: str @@ -42,7 +42,7 @@ message_id If the message identifier is not specified, all outstanding messages for all message identifiers are returned. - A trailing asterisk, (\*) wildcard is supported. + A trailing asterisk, (*) wildcard is supported. | **required**: False | **type**: str @@ -53,7 +53,7 @@ job_name If the message job name is not specified, all outstanding messages for all job names are returned. - A trailing asterisk, (\*) wildcard is supported. + A trailing asterisk, (*) wildcard is supported. | **required**: False | **type**: str @@ -69,24 +69,24 @@ message_filter filter - Specifies the substring or regex to match to the outstanding messages, see \ :emphasis:`use\_regex`\ . + Specifies the substring or regex to match to the outstanding messages, see *use_regex*. All special characters in a filter string that are not a regex are escaped. - Valid Python regular expressions are supported. See \ `the official documentation <https://docs.python.org/library/re.html>`__\ for more information. + Valid Python regular expressions are supported. See `the official documentation <https://docs.python.org/library/re.html>`_ for more information. - Regular expressions are compiled with the flag \ :strong:`re.DOTALL`\ which makes the \ :strong:`'.'`\ special character match any character including a newline." 
+ Regular expressions are compiled with the flag **re.DOTALL** which makes the **'.'** special character match any character including a newline. | **required**: True | **type**: str use_regex - Indicates that the value for \ :emphasis:`filter`\ is a regex or a string to match. + Indicates that the value for *filter* is a regex or a string to match. - If False, the module assumes that \ :emphasis:`filter`\ is not a regex and matches the \ :emphasis:`filter`\ substring on the outstanding messages. + If False, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. - If True, the module creates a regex from the \ :emphasis:`filter`\ string and matches it to the outstanding messages. + If True, the module creates a regex from the *filter* string and matches it to the outstanding messages. | **required**: False | **type**: bool @@ -222,7 +222,7 @@ actions | **sample**: STC01537 message_text - Content of the outstanding message requiring operator action awaiting a reply. If \ :emphasis:`message\_filter`\ is set, \ :emphasis:`message\_text`\ will be filtered accordingly. + Content of the outstanding message requiring operator action awaiting a reply. If *message_filter* is set, *message_text* will be filtered accordingly. | **returned**: success | **type**: str diff --git a/docs/source/modules/zos_ping.rst b/docs/source/modules/zos_ping.rst index acb901790..a4405b473 100644 --- a/docs/source/modules/zos_ping.rst +++ b/docs/source/modules/zos_ping.rst @@ -16,9 +16,9 @@ zos_ping -- Ping z/OS and check dependencies. Synopsis -------- -- \ `zos\_ping <./zos_ping.html>`__\ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. -- \ `zos\_ping <./zos_ping.html>`__\ returns \ :literal:`pong`\ when the target host is not missing any required dependencies. -- If the target host is missing optional dependencies, the \ `zos\_ping <./zos_ping.html>`__\ will return one or more warning messages.
+- `zos_ping <./zos_ping.html>`_ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. +- `zos_ping <./zos_ping.html>`_ returns ``pong`` when the target host is not missing any required dependencies. +- If the target host is missing optional dependencies, the `zos_ping <./zos_ping.html>`_ will return one or more warning messages. - If a required dependency is missing from the target host, an explanatory message will be returned with the module failure. @@ -44,7 +44,7 @@ Notes ----- .. note:: - This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry \ :literal:`scp\_extra\_args="-O"`\ into the ini file named \ :literal:`ansible.cfg`\ . + This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry ``scp_extra_args="-O"`` into the ini file named ``ansible.cfg``. 
diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 6f36e05e2..31b237588 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -16,7 +16,7 @@ zos_script -- Run scripts in z/OS Synopsis -------- -- The \ `zos\_script <./zos_script.html>`__\ module runs a local or remote script in the remote machine. +- The `zos_script <./zos_script.html>`_ module runs a local or remote script in the remote machine. @@ -56,7 +56,7 @@ creates encoding Specifies which encodings the script should be converted from and to. - If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. + If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. | **required**: False | **type**: dict @@ -87,9 +87,9 @@ executable remote_src - If set to \ :literal:`false`\ , the module will search the script in the controller. + If set to ``false``, the module will search the script in the controller. - If set to \ :literal:`true`\ , the module will search the script in the remote machine. + If set to ``true``, the module will search the script in the remote machine. | **required**: False | **type**: bool @@ -103,13 +103,13 @@ removes use_template - Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when \ :literal:`src`\ is a local file or directory. + Only valid when ``src`` is a local file or directory. 
- All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ | **required**: False | **type**: bool @@ -119,9 +119,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. - These options are ignored unless \ :literal:`use\_template`\ is true. + These options are ignored unless ``use_template`` is true. 
| **required**: False | **type**: dict @@ -200,7 +200,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -284,7 +284,7 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. - The location in the z/OS system where local scripts will be copied to can be configured through Ansible's \ :literal:`remote\_tmp`\ option. Refer to \ `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`__\ for more information. + The location in the z/OS system where local scripts will be copied to can be configured through Ansible's ``remote_tmp`` option. Refer to `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`_ for more information. All local scripts copied to a remote z/OS system will be removed from the managed node before the module finishes executing. @@ -292,13 +292,13 @@ Notes The module will only add execution permissions for the file owner. - If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error \ :literal:`BPXW0003I`\ . + If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. 
- For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with \ `zos\_tso\_command <./zos_tso_command.html>`__\ . + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. The community script module does not rely on Python to execute scripts on a managed node, while this module does. Python must be present on the remote machine. 
diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index b35c13a1b..4af6b1b52 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -40,7 +40,7 @@ commands max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. | **required**: False | **type**: int @@ -119,7 +119,7 @@ output max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. | **returned**: always | **type**: int diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index a53747d6c..6d995b32f 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -16,8 +16,8 @@ zos_unarchive -- Unarchive files and data sets in z/OS. Synopsis -------- -- The \ :literal:`zos\_unarchive`\ module unpacks an archive after optionally transferring it to the remote system. -- For supported archive formats, see option \ :literal:`format`\ . +- The ``zos_unarchive`` module unpacks an archive after optionally transferring it to the remote system. +- For supported archive formats, see option ``format``. - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. @@ -33,11 +33,11 @@ Parameters src The remote absolute path or data set of the archive to be uncompressed. - \ :emphasis:`src`\ can be a USS file or MVS data set name. + *src* can be a USS file or MVS data set name. USS file paths should be absolute paths. 
- MVS data sets supported types are \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . + MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. | **required**: True | **type**: str @@ -72,14 +72,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. + When providing the *xmit_log_data_set* name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using \ :literal:`xmit`\ or \ :literal:`terse`\ . + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. | **required**: False | **type**: bool @@ -87,7 +87,7 @@ format dest_volumes - When \ :emphasis:`use\_adrdssu=True`\ , specify the volume the data sets will be written to. + When *use_adrdssu=True*, specify the volume the data sets will be written to. If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. @@ -103,7 +103,7 @@ format dest The remote absolute path or data set where the content should be unarchived to. - \ :emphasis:`dest`\ can be a USS file, directory or MVS data set name. + *dest* can be a USS file, directory or MVS data set name. If dest has missing parent directories, they will not be created. @@ -116,7 +116,7 @@ group When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. 
+ This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str @@ -125,13 +125,13 @@ group mode The permission of the uncompressed files. - If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``) or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. + The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. - \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. + *mode=preserve* means that the file will be given the same permissions as the source file. | **required**: False | **type**: str @@ -149,7 +149,7 @@ owner include A list of directories, files or data set names to extract from the archive. - When \ :literal:`include`\ is set, only those files will we be extracted leaving the remaining files in the archive.
+ When ``include`` is set, only those files will be extracted leaving the remaining files in the archive. Mutually exclusive with exclude. @@ -177,7 +177,7 @@ list dest_data_set - Data set attributes to customize a \ :literal:`dest`\ data set that the archive will be copied into. + Data set attributes to customize a ``dest`` data set that the archive will be copied into. | **required**: False | **type**: dict space_primary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. + If the destination *dest* data set does not exist, this sets the primary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int space_secondary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. + If the destination *dest* data set does not exist, this sets the secondary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -220,7 +220,7 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str record_format - If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) + If the destination data set does not exist, this sets the format of the data set. (e.g ``fb``) Choices are case-sensitive.
@@ -265,9 +265,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -276,9 +276,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -327,7 +327,7 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -342,9 +342,9 @@ force remote_src - If set to true, \ :literal:`zos\_unarchive`\ retrieves the archive from the remote system. + If set to true, ``zos_unarchive`` retrieves the archive from the remote system. - If set to false, \ :literal:`zos\_unarchive`\ searches the local machine (Ansible controller) for the archive. + If set to false, ``zos_unarchive`` searches the local machine (Ansible controller) for the archive. | **required**: False | **type**: bool @@ -404,7 +404,7 @@ Notes .. note:: VSAMs are not supported. 
- This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst index 25a0897b9..195435924 100644 --- a/docs/source/modules/zos_volume_init.rst +++ b/docs/source/modules/zos_volume_init.rst @@ -17,14 +17,14 @@ zos_volume_init -- Initialize volumes or minidisks. Synopsis -------- - Initialize a volume or minidisk on z/OS. -- \ :emphasis:`zos\_volume\_init`\ will create the volume label and entry into the volume table of contents (VTOC). +- *zos_volume_init* will create the volume label and entry into the volume table of contents (VTOC). - Volumes are used for storing data and executable programs. - A minidisk is a portion of a disk that is linked to your virtual machine. - A VTOC lists the data sets that reside on a volume, their location, size, and other attributes. -- \ :emphasis:`zos\_volume\_init`\ uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class \`STGADMIN.ICK.INIT\`. 
Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. -- ICKDSF is an Authorized Program Facility (APF) program on z/OS, \ :emphasis:`zos\_volume\_init`\ will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. +- *zos_volume_init* uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class `STGADMIN.ICK.INIT`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. +- ICKDSF is an Authorized Program Facility (APF) program on z/OS, *zos_volume_init* will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. - Note that defaults set on target z/OS systems may override ICKDSF parameters. -- If is recommended that data on the volume is backed up as the \ :emphasis:`zos\_volume\_init`\ module will not perform any backups. You can use the \ `zos\_backup\_restore <./zos_backup_restore.html>`__\ module to backup a volume. +- It is recommended that data on the volume is backed up as the *zos_volume_init* module will not perform any backups. You can use the `zos_backup_restore <./zos_backup_restore.html>`_ module to backup a volume. @@ -35,9 +35,9 @@ Parameters address - \ :emphasis:`address`\ is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. + *address* is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. - \ :emphasis:`address`\ can be the number assigned to the device (device number) when it is installed or the virtual address. + *address* can be the number assigned to the device (device number) when it is installed or the virtual address. 
| **required**: True | **type**: str @@ -46,15 +46,15 @@ address verify_volid Verify that the volume serial matches what is on the existing volume or minidisk. - \ :emphasis:`verify\_volid`\ must be 1 to 6 alphanumeric characters or \ :literal:`\*NONE\*`\ . + *verify_volid* must be 1 to 6 alphanumeric characters or ``*NONE*``. - To verify that a volume serial number does not exist, use \ :emphasis:`verify\_volid=\*NONE\*`\ . + To verify that a volume serial number does not exist, use *verify_volid=*NONE**. - If \ :emphasis:`verify\_volid`\ is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. + If *verify_volid* is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. - If \ :emphasis:`verify\_volid=\*NONE\*`\ is specified and a volume serial is found on the volume or minidisk, initialization does not complete. + If *verify_volid=*NONE** is specified and a volume serial is found on the volume or minidisk, initialization does not complete. - Note, this option is \ :strong:`not`\ a boolean, leave it blank to skip the verification. + Note, this option is **not** a boolean, leave it blank to skip the verification. | **required**: False | **type**: str @@ -73,11 +73,11 @@ volid Expects 1-6 alphanumeric, national ($,#,@) or special characters. - A \ :emphasis:`volid`\ with less than 6 characters will be padded with spaces. + A *volid* with less than 6 characters will be padded with spaces. - A \ :emphasis:`volid`\ can also be referred to as volser or volume serial number. + A *volid* can also be referred to as volser or volume serial number. - When \ :emphasis:`volid`\ is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. + When *volid* is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. 
| **required**: False | **type**: str @@ -99,7 +99,7 @@ index The VTOC index enhances the performance of VTOC access. - When set to \ :emphasis:`false`\ , no index will be created. + When set to *false*, no index will be created. | **required**: False | **type**: bool @@ -109,7 +109,7 @@ index sms_managed Specifies that the volume be managed by Storage Management System (SMS). - If \ :emphasis:`sms\_managed`\ is \ :emphasis:`true`\ then \ :emphasis:`index`\ must also be \ :emphasis:`true`\ . + If *sms_managed* is *true* then *index* must also be *true*. | **required**: False | **type**: bool @@ -127,7 +127,7 @@ verify_volume_empty tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index ef0f6c183..c1eecd254 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -21,7 +21,7 @@ user action is required, this documentation is reference only. * `zos_job_submit`_: Used to submit a job from the controller to the z/OS manage node. * `zos_ping`_: Used to transfer the modules REXX source to the z/OS managed node. * `zos_script`_: Used to transfer scripts from the controller to the z/OS manage node. -* `_zos_unarchive`_: Used to transfer archives from the controller to the z/OS manage node. +* `zos_unarchive`_: Used to transfer archives from the controller to the z/OS manage node. .. _zos_copy: modules/zos_copy.html @@ -35,3 +35,4 @@ user action is required, this documentation is reference only. modules/zos_script.html .. 
_zos_unarchive: modules/zos_unarchive.html + \ No newline at end of file From 133519dd560069865cda08406dc0c4f1c1516cfa Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 8 May 2024 22:38:59 -0700 Subject: [PATCH 368/413] Updated and formated migration guide for case sensitivity Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1388-lowercase-choices.yml | 57 ++++++++++++------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml index 0f14f42fe..e181d8b0f 100644 --- a/changelogs/fragments/1388-lowercase-choices.yml +++ b/changelogs/fragments/1388-lowercase-choices.yml @@ -1,11 +1,14 @@ breaking_changes: - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``space_type`` of ``dest_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dest_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - zos_backup_restore - option ``space_type`` no longer accepts uppercase @@ -13,18 +16,23 @@ breaking_changes: (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``space_type`` of ``dest_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dest_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - zos_data_set - option ``type`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Option ``space_type`` no longer accepts uppercase choices, users + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Option ``record_format`` no longer accepts uppercase choices, users + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``record_format`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Options inside ``batch`` no longer accept uppercase choices, users should + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - zos_job_submit - option ``location`` no longer accepts uppercase choices, @@ -32,34 +40,45 @@ breaking_changes: (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- zos_mount - option ``fs_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Option ``unmount_opts`` no longer accepts uppercase choices, users + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Option ``mount_opts`` no longer accepts uppercase choices, users + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Option ``tag_untagged`` no longer accepts uppercase choices, users + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Option ``automove`` no longer accepts uppercase choices, users + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``automove`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboptions ``disposition_normal`` and ``disposition_abnormal`` of + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. This also applies when defining a ``dd_data_set`` inside ``dd_concat``. - Suboption ``space_type`` of ``dd_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. 
- Suboption ``record_format`` of ``dd_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dd_unix`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Options inside ``dd_concat`` no longer accept uppercase choices, + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``space_type`` of ``dest_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dest_data_set`` no longer accepts + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
From f7d799fb6fa22f1276055cecfed233ad779ed7bb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 8 May 2024 22:39:27 -0700 Subject: [PATCH 369/413] summary fragement for changelog Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/v1.10.0-beta.1_summary.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 changelogs/fragments/v1.10.0-beta.1_summary.yml diff --git a/changelogs/fragments/v1.10.0-beta.1_summary.yml b/changelogs/fragments/v1.10.0-beta.1_summary.yml new file mode 100644 index 000000000..0c1e35217 --- /dev/null +++ b/changelogs/fragments/v1.10.0-beta.1_summary.yml @@ -0,0 +1,6 @@ +release_summary: | + Release Date: '2024-05-08' + This changelog describes all changes made to the modules and plugins included + in this collection. The release date is the date the changelog is created. + For additional details such as required dependencies and availability review + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From bee2251cf37d3c2c2d9cceb6dfc2de8e25b28165 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 8 May 2024 22:40:39 -0700 Subject: [PATCH 370/413] Changelog generation updates Signed-off-by: ddimatos <dimatos@gmail.com> --- CHANGELOG.rst | 75 ++++++++++++--- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 167 ++++++++++++++++++++++++++++++++++ 3 files changed, 232 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d2f69d546..7b5c63952 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,9 +1,70 @@ -============================== -ibm.ibm_zos_core Release Notes -============================== +================================ +ibm.ibm\_zos\_core Release Notes +================================ .. 
contents:: Topics +v1.10.0-beta.1 +============== + +Release Summary +--------------- + +Release Date: '2024-05-08' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). +- zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265). +- zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307). +- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183). + +Breaking Changes / Porting Guide +-------------------------------- + +- zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - option ``record_format`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - option ``type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_job_submit - option ``location`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``automove`` no longer accepts uppercase choices, users should replace them with lowercase ones. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``fs_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. 
This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + +Bugfixes +-------- + +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261). +- zos_apf - List option only returned one data set. Fix now returns the list of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). +- zos_blockinfile - Using double quotation marks inside a block resulted in a false positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340). +- zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220). +- zos_job_submit - when the argument max_rc was different than 0 the changed response returned as false. 
Fix now return a changed response as true when the rc is not 0 and max_rc is above or equal to the value of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345). +- zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created during the module execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1320). v1.9.0 ====== @@ -336,7 +397,6 @@ in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -360,7 +420,6 @@ in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Major Changes ------------- @@ -432,7 +491,6 @@ in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Minor Changes ------------- @@ -461,7 +519,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -486,7 +543,6 @@ in this collection. 
For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -505,7 +561,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -540,7 +595,6 @@ the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ inventory and a variables configuration. - Automate software management with SMP/E Playbooks - Minor Changes ------------- @@ -619,7 +673,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Minor Changes ------------- diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 4e2979ebb..e5bd167b7 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -135,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.9.0 +version: 1.10.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index a8404bf84..6e034e91c 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -78,6 +78,173 @@ releases: name: zos_tso_command namespace: '' release_date: '2022-06-07' + 1.10.0-beta.1: + changes: + breaking_changes: + - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``record_format`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``space_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - options inside ``batch`` no longer accept uppercase choices, + users should replace them with lowercase ones. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_submit - option ``location`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``automove`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``fs_type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` + of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. + This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer + accepts uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261). + - zos_apf - List option only returned one data set. Fix now returns the list + of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + - zos_blockinfile - Using double quotation marks inside a block resulted in + a false positive result with ZOAU 1.3. Fix now handles this special case to + avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340). + - zos_find - Filter size failed if a PDS/E matched the pattern. 
Fix now gets + the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220). + - zos_job_submit - when the argument max_rc was different than 0 the changed + response returned as false. Fix now return a changed response as true when + the rc is not 0 and max_rc is above or equal to the value of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345). + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating + temporary data sets. Fix now honors the value if provided and uses it as High + Level Qualifier for temporary data sets created during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). + minor_changes: + - zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + - zos_backup_restore - Add tmp_hlq option to the user interface to override + the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265). + - zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + - zos_copy - Improve zos_copy performance when copying multiple members from + one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183). + release_summary: 'Release Date: ''2024-05-08'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. 
+ + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1032-clean-job_submit-test.yml + - 1152-zos-lineinfile-remove-zos_copy-dependency.yml + - 1156-zos_archive-remove-zos_copy_dep.yml + - 1157-remove-zos-copy-from-zos-encode-tests.yml + - 1165-remove-zos-copy-dep-from-zos-fetch.yml + - 1167-remove-zos-copy-from-zos-blockinfile-tests.yml + - 1169-util-job-zoau-migration.yml + - 1179-remove-zos_encode-from_zos_lineinfile-tests.yml + - 1181-zoau-migration-zos_operator.yml + - 1182-migrate-module-utils-data-set.yml + - 1183-copy-members.yml + - 1184-remove-zos-fetch-dep-from-zos-copy.yml + - 1187-migrate-module-utils-copy.yml + - 1188-migrate-module_utils-backup.yml + - 1189-migrate-module_utils-encode.yml + - 1190-migrate-module_utils-dd_statement.yml + - 1196-zoau-migration-zos_gather_facts.yml + - 1202-doc-gen-script-portability.yml + - 1204-migrate-zos_apf.yml + - 1209-zoau-migration-zos_job_submit.yml + - 1215-Migrate_zos_operator_action_query.yml + - 1216-Validate_module_zos_job_output_migration.yml + - 1217-validate-job-query.yml + - 1218-migrate-zos_encode.yml + - 1220-bugfix-zos_job_submit-default_value.yml + - 1222-zoau-migration-zos_copy.yml + - 1227-migrate-zos_archive.yml + - 1228-zos_find-remove-zos_lineinfile_dep.yml + - 1229-migrate-zos_fetch.yml + - 1237-migrate-zos_mount.yml + - 1238-migrate-zos_unarchive.yml + - 1242-zoau-migration-zos_data_set.yml + - 1256_Migrate_zos_blockinfile_and_lineinfile.yml + - 1257-zoau-import-zos_apf.yml + - 1261-job-submit-non-utf8-chars.yml + - 1265_Migrate_zos_backup_restore.yml + - 1270-quick-fix-len-of-volumes-work-around.yml + - 1286-update-zos_archive-zos_unarchive-docs.yml + - 1295-doc-zos_ping-scp.yml + - 1298-Remove_local_charset_from_zos_fetch.yml + - 1307-update-sanity-zos_copy.yml + - 1320-Zos_mvs_raw_ignores_tmp_hlq.yml + - 
1322-update-docstring-encode.yml + - 1331-update-docstring-ickdsf.yml + - 1332-update-docstring-import_handler.yml + - 1333-update-docstring-job.yml + - 1336-update-docstring-validation.yml + - 1340-Work_around_fix_false_positive.yml + - 1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml + - 1347-update-docstring-zos_data_set.yml + - 1348-update-docstring-zos_encode.yml + - 1349-update-docstring-zos_fetch.yml + - 1350-update-docstring-zos_find.yml + - 1351-update-docstring-zos_gather_facts.yml + - 1352-update-docstring-zos_job_output.yml + - 1353-update-docstring-zos_job_query.yml + - 1354-update-docstring-zos_job_submit.yml + - 1355-update-docstring-zos_lineinfile.yml + - 1356-update-docstring-zos_mount.yml + - 1388-lowercase-choices.yml + - 1390-update-docstring-zos_script.yml + - 1391-update-docstring-zos_tso_command.yml + - 1392-update-docstring-zos_volume_init.yml + - 1393-update-docstring-zos_apf.yml + - 1394-Update_docstring-zos_operator_action_query.yml + - 1443-zos_find-filter-size.yml + - 692-changelog-lint-ac-tool.yml + - 971-bug-job_submit-can-stacktrace.yml + - 992-fix-sanity4to6.yml + - v1.10.0-beta.1_summary.yml + release_date: '2024-05-08' 1.2.1: changes: bugfixes: From bded8ba97b1a71981326646893e5a9b7d6b5b711 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 9 May 2024 14:38:44 -0700 Subject: [PATCH 371/413] Update release notes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 96 +++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 7c2c3a929..1bef46418 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,100 @@ Releases ======== +Version 1.10.0-beta.1 +===================== + +Major Changes +------------- + +- Starting with IBM Ansible z/OS core version 1.10.x, ZOAU version 1.3.0 will be required. 
+- Starting with IBM Ansible z/OS core version 1.10.x, all module options are case sensitive, + review the porting guide for specifics. + +Minor Changes +------------- + +- ``zos_apf`` - Enhanced error messages when an exception is caught. +- ``zos_backup_restore`` - Add tmp_hlq option to the user interface to override the default high level qualifier (HLQ) for temporary and backup. +- ``zos_copy`` - Documented module options `group` and `owner`. + +Bugfixes +-------- + +- ``zos_apf`` - Option **list** previously only returned one data set, now it returns a list of retrieved data sets. +- ``zos_blockinfile`` - Option **block** when containing double double quotation marks results in a task failure (failed=True); now the module handles this case to avoid failure. +- ``zos_find`` - Option **size** failed if a PDS/E matched the pattern, now filtering on utilized size for a PDS/E is supported. +- ``zos_job_submit`` + + - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. + - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. + +- zos_mvs_raw - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1320). + +Porting Guide +------------- + +- ``zos_archive`` + + - option **terse_pack** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. 
+ - suboption **type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_backup_restore`` - option **space_type** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_copy`` + + - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_data_set`` + + - option **record_format** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **space_type** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **type** no longer accepts uppercase choices, users should replace them with lowercase ones. + - options inside **batch** no longer accept uppercase choices, users should replace them with lowercase ones. + +- ``zos_job_submit`` - option **location** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_mount`` + + - option **automove** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **fs_type** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **mount_opts** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **tag_untagged** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **unmount_opts** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_mvs_raw`` + + - options inside **dd_concat** no longer accept uppercase choices, users should replace them with lowercase ones. 
+ - suboption **record_format** of **dd_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **record_format** of **dd_unix** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dd_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dd_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboptions **disposition_normal** and **disposition_abnormal** of **dd_data_set** no longer accept **catlg** and **uncatlg** as choices. This also applies when defining a **dd_data_set** inside **dd_concat**. + +- ``zos_unarchive`` + + - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ +* Supported by IBM `Z Open Automation Utilities 1.3.0`_ or later. + Version 1.9.0 ============= @@ -1047,6 +1141,8 @@ Known issues https://www.ibm.com/docs/en/zoau/1.2.x .. _Z Open Automation Utilities 1.2.5: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.3.0: + https://www.ibm.com/docs/en/zoau/1.3.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. 
_z/OS®: From 34330879c094300b026999e3234daa25acdcdb15 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 9 May 2024 15:08:06 -0700 Subject: [PATCH 372/413] Update unsued vars via pyflakes Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/action/zos_copy.py | 2 +- plugins/module_utils/data_set.py | 2 +- plugins/module_utils/job.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 7d9b4d3fd..d4c7b2166 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -245,7 +245,7 @@ def run(self, tmp=None, task_vars=None): original_src = task_args.get("src") if original_src: if not remote_src: - base_name = os.path.basename(original_src) + os.path.basename(original_src) if original_src.endswith("/"): src = temp_path + "/" else: diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 40c1a4047..94ee8e10e 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1028,7 +1028,7 @@ def create( create_exception.response.rc, create_exception.response.stdout_response + "\n" + create_exception.response.stderr_response ) - except exceptions.DatasetVerificationError as e: + except exceptions.DatasetVerificationError: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 if volumes and len(volumes) > 1: if DataSet.data_set_cataloged(name, volumes): diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index c25789030..8d9ac3a5c 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -385,7 +385,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T try: list_of_dds = jobs.list_dds(entry.job_id) - except exceptions.DDQueryException as err: + except exceptions.DDQueryException: is_dd_query_exception = True # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full 
wait_time_s. @@ -406,7 +406,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T list_of_dds = jobs.list_dds(entry.job_id) is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False - except exceptions.DDQueryException as err: + except exceptions.DDQueryException: is_dd_query_exception = True continue @@ -463,7 +463,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T single_dd["step_name"], single_dd["dd_name"] ) - except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) as e: + except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError): tmpcont = ( "Non-printable UTF-8 characters were present in this output. " "Please access it from the job log." From 1ebac0407e7e87deb7c35431dfc1be1f5a562602 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 9 May 2024 15:12:26 -0700 Subject: [PATCH 373/413] Wrong indentation: expected 6 but found 8 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/filter/wtor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py index 17b530218..2ef3a3cbb 100644 --- a/plugins/filter/wtor.py +++ b/plugins/filter/wtor.py @@ -54,8 +54,8 @@ - name: Evaluate if there are any existing dump messages matching 'IEE094D SPECIFY OPERAND' assert: that: - - is_specify_operand is defined - - bool_zos_operator_action_continue + - is_specify_operand is defined + - bool_zos_operator_action_continue success_msg: "Found 'IEE094D SPECIFY OPERAND' message." fail_msg: "Did not find 'IEE094D SPECIFY OPERAND' message." 
""" From 9fc22e7e016a3ccab8e362b00137e14dde1b0a91 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 9 May 2024 20:52:28 -0700 Subject: [PATCH 374/413] Lint corrections supporting YAML revision v1.2.2 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_apf.py | 4 +- plugins/modules/zos_archive.py | 2 +- plugins/modules/zos_backup_restore.py | 6 +- plugins/modules/zos_copy.py | 2 +- plugins/modules/zos_data_set.py | 14 +-- plugins/modules/zos_encode.py | 5 +- plugins/modules/zos_lineinfile.py | 5 +- plugins/modules/zos_mount.py | 17 ++- plugins/modules/zos_mvs_raw.py | 107 +++++++++---------- plugins/modules/zos_operator_action_query.py | 2 +- plugins/modules/zos_unarchive.py | 4 +- plugins/modules/zos_volume_init.py | 14 +-- 12 files changed, 89 insertions(+), 93 deletions(-) diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index ecf7df74c..c9cc8ba6b 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -221,7 +221,7 @@ - name: Add a library (cataloged) to the APF list and persistence zos_apf: library: SOME.SEQUENTIAL.DATASET - force_dynamic: True + force_dynamic: true persistent: data_set_name: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence @@ -239,7 +239,7 @@ batch: - library: SOME.SEQ.DS1 - library: SOME.SEQ.DS2 - sms: True + sms: true - library: SOME.SEQ.DS3 volume: T12345 - name: Print the APF list matching library pattern or volume serial number diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 8a3961c8b..aca95e2f7 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -357,7 +357,7 @@ name: terse format_options: terse_pack: "spack" - use_adrdssu: True + use_adrdssu: true # Use a pattern to store - name: Compress data set pattern using xmit diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index a112da247..7ac50b550 100644 --- 
a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -223,7 +223,7 @@ data_sets: include: user.** backup_name: /tmp/temp_backup.dzp - recover: yes + recover: true - name: Backup all datasets matching the pattern USER.** to data set MY.BACKUP.DZP, allocate 100MB for data sets used in backup process. @@ -253,7 +253,7 @@ operation: backup backup_name: /tmp/temp_backup.dzp volume: MYVOL1 - full_volume: yes + full_volume: true space: 1 space_type: g @@ -296,7 +296,7 @@ zos_backup_restore: operation: restore volume: MYVOL2 - full_volume: yes + full_volume: true backup_name: MY.BACKUP.DZP space: 1 space_type: g diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 489c71593..075162e69 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -512,7 +512,7 @@ zos_copy: src: /path/to/foo.conf dest: /etc/foo.conf - mode: 0644 + mode: "0644" group: foo owner: bar diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index b500eb84a..4eae68733 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -569,7 +569,7 @@ space_type: m record_format: u record_length: 25 - replace: yes + replace: true - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. 
zos_data_set: @@ -580,7 +580,7 @@ record_format: u record_length: 25 volumes: "222222" - replace: yes + replace: true - name: Create an ESDS data set if it does not exist zos_data_set: @@ -615,7 +615,7 @@ zos_data_set: name: someds.name.here(mydata) type: member - replace: yes + replace: true - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: @@ -633,22 +633,22 @@ name: someds.name.here(mydata) state: absent type: member - force: yes + force: true - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - - name: someds.name.here1 + - name: someds.name.here1 type: pds space_primary: 5 space_type: m record_format: fb - replace: yes + replace: true - name: someds.name.here1(member1) type: member - name: someds.name.here2(member1) type: member - replace: yes + replace: true - name: someds.name.here2(member2) type: member diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index b92fdc72b..e9afa4994 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -140,8 +140,8 @@ encoding: from: IBM-1047 to: ISO8859-1 - backup: yes - backup_compress: yes + backup: true + backup_compress: true - name: Convert file encoding from IBM-1047 to ISO8859-1 to a directory zos_encode: @@ -249,7 +249,6 @@ encoding: from: ISO8859-1 to: IBM-1047 - """ RETURN = r""" diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 43e85061b..0988ef2d4 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -229,15 +229,14 @@ src: /tmp/src/somefile regexp: '^(.*)User(\d+)m(.*)$' line: '\1APPUser\3' - backrefs: yes + backrefs: true - name: Add a line to a member while a task is in execution zos_lineinfile: src: SOME.PARTITIONED.DATA.SET(DATA) insertafter: EOF line: 'Should be a working test now' - force: True - + force: true """ RETURN = r""" diff --git a/plugins/modules/zos_mount.py 
b/plugins/modules/zos_mount.py index f16ddfe29..7b4b04654 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -344,8 +344,8 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + comment: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. zos_mount: @@ -354,10 +354,10 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - backup: Yes - backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + backup: true + backup_name: SYS1.PARMLIB(BPXPRMAB) + comment: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -365,7 +365,7 @@ path: /u/omvsadm/core fs_type: zfs state: mounted - allow_uid: no + allow_uid: false - name: Mount a filesystem asynchronously (don't wait for completion). zos_mount: @@ -400,7 +400,6 @@ state: mounted automove: AUTOMOVE automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7 - """ RETURN = r""" diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index bbb187ef9..25bfe56dc 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -1291,7 +1291,7 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1315,7 +1315,7 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1360,7 +1360,7 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1388,8 +1388,8 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - replace: yes - backup: yes + replace: true + backup: true type: seq space_primary: 5 space_secondary: 1 @@ -1460,7 +1460,7 @@ - name: Take a set of data sets and write them to an archive. zos_mvs_raw: program_name: adrdssu - auth: yes + auth: true dds: - dd_data_set: dd_name: archive @@ -1476,7 +1476,7 @@ - name: Merge two sequential data sets and write them to new data set zos_mvs_raw: program_name: sort - auth: no + auth: false parm: "MSGPRT=CRITICAL,LIST" dds: - dd_data_set: @@ -1507,7 +1507,7 @@ files. zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - dd_concat: dd_name: sysprint @@ -1524,57 +1524,56 @@ dd_name: sysin content: " LISTCAT ENTRIES('SYS1.*')" -- name: Drop the contents of input dataset into output dataset - using REPRO command. +- name: Drop the contents of input dataset into output dataset using REPRO command. 
zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - - dd_data_set: - dd_name: INPUT - data_set_name: myhlq.ds1.input - - dd_data_set: - dd_name: OUTPUT - data_set_name: myhlq.ds1.output - - dd_input: - dd_name: sysin - content: | + - dd_data_set: + dd_name: INPUT + data_set_name: myhlq.ds1.input + - dd_data_set: + dd_name: OUTPUT + data_set_name: myhlq.ds1.output + - dd_input: + dd_name: sysin + content: | " REPRO - INFILE(INPUT) - OUTFILE(OUTPUT)" - - dd_output: - dd_name: sysprint - return_content: - type: text - - - name: Define a cluster using a literal block style indicator - with a 2 space indentation. - zos_mvs_raw: - program_name: idcams - auth: yes - dds: - - dd_output: - dd_name: sysprint - return_content: - type: text - - dd_input: - dd_name: sysin - content: |2 - DEFINE CLUSTER - - (NAME(ANSIBLE.TEST.VSAM) - - CYL(10 10) - - FREESPACE(20 20) - - INDEXED - - KEYS(32 0) - - NOERASE - - NONSPANNED - - NOREUSE - - SHAREOPTIONS(3 3) - - SPEED - - UNORDERED - - RECORDSIZE(4086 32600) - - VOLUMES(222222) - - UNIQUE) + - dd_output: + dd_name: sysprint + return_content: + type: text + +- name: Define a cluster using a literal block style indicator + with a 2 space indentation. 
+ zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: 2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ed426e9b5..415d94f3e 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -121,7 +121,7 @@ system: mv29 message_filter: filter: ^.*IMS.*$ - use_regex: yes + use_regex: true """ RETURN = r""" diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index f824459c4..cb587dc0e 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -356,8 +356,8 @@ format: name: xmit format_options: - use_adrdssu: True - list: True + use_adrdssu: true + list: true ''' RETURN = r''' diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py index 0be4f2a8f..d0a2c55be 100644 --- a/plugins/modules/zos_volume_init.py +++ b/plugins/modules/zos_volume_init.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -151,14 +151,14 @@ zos_volume_init: address: "1234" volid: "DEMO01" - sms_managed: no + sms_managed: false - name: Initialize non-SMS managed target volume with all the default options and override the default high level qualifier (HLQ). 
zos_volume_init: address: 1234 volid: DEMO01 - sms_managed: no + sms_managed: false tmp_hlq: TESTUSR - name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as @@ -167,12 +167,12 @@ zos_volume_init: address: e8d8 vtoc_size: 30 - index: yes - sms_managed: yes + index: true + sms_managed: true volid: ine8d8 verify_volid: ine8d8 - verify_volume_empty: yes - verify_offline: no + verify_volume_empty: true + verify_offline: false - name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' using Ansible loops. From 1968afe46838ae4c801d79200e09c102909f01a8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 9 May 2024 23:06:16 -0700 Subject: [PATCH 375/413] Update docs with lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_apf.rst | 4 +- docs/source/modules/zos_apf.rst-e | 4 +- docs/source/modules/zos_archive.rst | 2 +- docs/source/modules/zos_backup_restore.rst | 6 +- docs/source/modules/zos_copy.rst | 2 +- docs/source/modules/zos_data_set.rst | 14 +-- docs/source/modules/zos_encode.rst | 5 +- docs/source/modules/zos_lineinfile.rst | 5 +- docs/source/modules/zos_mount.rst | 15 ++- docs/source/modules/zos_mvs_raw.rst | 105 +++++++++--------- .../modules/zos_operator_action_query.rst | 2 +- docs/source/modules/zos_unarchive.rst | 4 +- docs/source/modules/zos_volume_init.rst | 12 +- 13 files changed, 88 insertions(+), 92 deletions(-) diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index c7e0db588..a94fdc95e 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -225,7 +225,7 @@ Examples - name: Add a library (cataloged) to the APF list and persistence zos_apf: library: SOME.SEQUENTIAL.DATASET - force_dynamic: True + force_dynamic: true persistent: data_set_name: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and 
persistence @@ -243,7 +243,7 @@ Examples batch: - library: SOME.SEQ.DS1 - library: SOME.SEQ.DS2 - sms: True + sms: true - library: SOME.SEQ.DS3 volume: T12345 - name: Print the APF list matching library pattern or volume serial number diff --git a/docs/source/modules/zos_apf.rst-e b/docs/source/modules/zos_apf.rst-e index 60c036dcf..b758d3129 100644 --- a/docs/source/modules/zos_apf.rst-e +++ b/docs/source/modules/zos_apf.rst-e @@ -225,7 +225,7 @@ Examples - name: Add a library (cataloged) to the APF list and persistence zos_apf: library: SOME.SEQUENTIAL.DATASET - force_dynamic: True + force_dynamic: true persistent: data_set_name: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence @@ -243,7 +243,7 @@ Examples batch: - library: SOME.SEQ.DS1 - library: SOME.SEQ.DS2 - sms: True + sms: true - library: SOME.SEQ.DS3 volume: T12345 - name: Print the APF list matching library pattern or volume serial number diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index f07aa931e..f2971fc6a 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -374,7 +374,7 @@ Examples name: terse format_options: terse_pack: "spack" - use_adrdssu: True + use_adrdssu: true # Use a pattern to store - name: Compress data set pattern using xmit diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 66d0d0f4b..69ca57cda 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -241,7 +241,7 @@ Examples data_sets: include: user.** backup_name: /tmp/temp_backup.dzp - recover: yes + recover: true - name: Backup all datasets matching the pattern USER.** to data set MY.BACKUP.DZP, allocate 100MB for data sets used in backup process. 
@@ -271,7 +271,7 @@ Examples operation: backup backup_name: /tmp/temp_backup.dzp volume: MYVOL1 - full_volume: yes + full_volume: true space: 1 space_type: g @@ -314,7 +314,7 @@ Examples zos_backup_restore: operation: restore volume: MYVOL2 - full_volume: yes + full_volume: true backup_name: MY.BACKUP.DZP space: 1 space_type: g diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 453f07302..d8319ece9 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -635,7 +635,7 @@ Examples zos_copy: src: /path/to/foo.conf dest: /etc/foo.conf - mode: 0644 + mode: "0644" group: foo owner: bar diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 668b367bf..34162d72e 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -588,7 +588,7 @@ Examples space_type: m record_format: u record_length: 25 - replace: yes + replace: true - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. 
zos_data_set: @@ -599,7 +599,7 @@ Examples record_format: u record_length: 25 volumes: "222222" - replace: yes + replace: true - name: Create an ESDS data set if it does not exist zos_data_set: @@ -634,7 +634,7 @@ Examples zos_data_set: name: someds.name.here(mydata) type: member - replace: yes + replace: true - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: @@ -652,22 +652,22 @@ Examples name: someds.name.here(mydata) state: absent type: member - force: yes + force: true - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - - name: someds.name.here1 + - name: someds.name.here1 type: pds space_primary: 5 space_type: m record_format: fb - replace: yes + replace: true - name: someds.name.here1(member1) type: member - name: someds.name.here2(member1) type: member - replace: yes + replace: true - name: someds.name.here2(member2) type: member diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 4c2294e24..2134b336e 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -143,8 +143,8 @@ Examples encoding: from: IBM-1047 to: ISO8859-1 - backup: yes - backup_compress: yes + backup: true + backup_compress: true - name: Convert file encoding from IBM-1047 to ISO8859-1 to a directory zos_encode: @@ -256,7 +256,6 @@ Examples - Notes ----- diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index f7005017e..4e416f97f 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -239,15 +239,14 @@ Examples src: /tmp/src/somefile regexp: '^(.*)User(\d+)m(.*)$' line: '\1APPUser\3' - backrefs: yes + backrefs: true - name: Add a line to a member while a task is in execution zos_lineinfile: src: SOME.PARTITIONED.DATA.SET(DATA) insertafter: EOF line: 'Should be a working test now' - force: True - + force: true diff --git 
a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index cfe2f0ae1..3b30be909 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -320,8 +320,8 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + comment: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. zos_mount: @@ -330,10 +330,10 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - backup: Yes - backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + backup: true + backup_name: SYS1.PARMLIB(BPXPRMAB) + comment: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -341,7 +341,7 @@ Examples path: /u/omvsadm/core fs_type: zfs state: mounted - allow_uid: no + allow_uid: false - name: Mount a filesystem asynchronously (don't wait for completion). 
zos_mount: @@ -380,7 +380,6 @@ Examples - Notes ----- diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 500802fe2..d98c9493b 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -1464,7 +1464,7 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1488,7 +1488,7 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1533,7 +1533,7 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1561,8 +1561,8 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - replace: yes - backup: yes + replace: true + backup: true type: seq space_primary: 5 space_secondary: 1 @@ -1633,7 +1633,7 @@ Examples - name: Take a set of data sets and write them to an archive. zos_mvs_raw: program_name: adrdssu - auth: yes + auth: true dds: - dd_data_set: dd_name: archive @@ -1649,7 +1649,7 @@ Examples - name: Merge two sequential data sets and write them to new data set zos_mvs_raw: program_name: sort - auth: no + auth: false parm: "MSGPRT=CRITICAL,LIST" dds: - dd_data_set: @@ -1680,7 +1680,7 @@ Examples files. zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - dd_concat: dd_name: sysprint @@ -1697,57 +1697,56 @@ Examples dd_name: sysin content: " LISTCAT ENTRIES('SYS1.*')" - - name: Drop the contents of input dataset into output dataset - using REPRO command. + - name: Drop the contents of input dataset into output dataset using REPRO command. 
zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - - dd_data_set: - dd_name: INPUT - data_set_name: myhlq.ds1.input - - dd_data_set: - dd_name: OUTPUT - data_set_name: myhlq.ds1.output - - dd_input: - dd_name: sysin - content: | + - dd_data_set: + dd_name: INPUT + data_set_name: myhlq.ds1.input + - dd_data_set: + dd_name: OUTPUT + data_set_name: myhlq.ds1.output + - dd_input: + dd_name: sysin + content: | " REPRO - INFILE(INPUT) - OUTFILE(OUTPUT)" - - dd_output: - dd_name: sysprint - return_content: - type: text - - - name: Define a cluster using a literal block style indicator - with a 2 space indentation. - zos_mvs_raw: - program_name: idcams - auth: yes - dds: - - dd_output: - dd_name: sysprint - return_content: - type: text - - dd_input: - dd_name: sysin - content: |2 - DEFINE CLUSTER - - (NAME(ANSIBLE.TEST.VSAM) - - CYL(10 10) - - FREESPACE(20 20) - - INDEXED - - KEYS(32 0) - - NOERASE - - NONSPANNED - - NOREUSE - - SHAREOPTIONS(3 3) - - SPEED - - UNORDERED - - RECORDSIZE(4086 32600) - - VOLUMES(222222) - - UNIQUE) + - dd_output: + dd_name: sysprint + return_content: + type: text + + - name: Define a cluster using a literal block style indicator + with a 2 space indentation. 
+ zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: 2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index b2e99d399..ba9398b50 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -128,7 +128,7 @@ Examples system: mv29 message_filter: filter: ^.*IMS.*$ - use_regex: yes + use_regex: true diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index 6d995b32f..f2d7aba8b 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -392,8 +392,8 @@ Examples format: name: xmit format_options: - use_adrdssu: True - list: True + use_adrdssu: true + list: true diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst index 195435924..5647ad998 100644 --- a/docs/source/modules/zos_volume_init.rst +++ b/docs/source/modules/zos_volume_init.rst @@ -159,14 +159,14 @@ Examples zos_volume_init: address: "1234" volid: "DEMO01" - sms_managed: no + sms_managed: false - name: Initialize non-SMS managed target volume with all the default options and override the default high level qualifier (HLQ). 
zos_volume_init: address: 1234 volid: DEMO01 - sms_managed: no + sms_managed: false tmp_hlq: TESTUSR - name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as @@ -175,12 +175,12 @@ Examples zos_volume_init: address: e8d8 vtoc_size: 30 - index: yes - sms_managed: yes + index: true + sms_managed: true volid: ine8d8 verify_volid: ine8d8 - verify_volume_empty: yes - verify_offline: no + verify_volume_empty: true + verify_offline: false - name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' using Ansible loops. From 7172e649b7ee36c603a157c1b929faccf1e0563e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 9 May 2024 23:13:38 -0700 Subject: [PATCH 376/413] Remove deprecated versions of the collection from release notes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 549 ---------------------------------- 1 file changed, 549 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 1bef46418..8508178ee 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -559,555 +559,6 @@ Reference * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. -Version 1.4.1 -============= - -Bug fixes ---------- - -* ``zos_copy`` - - * Copy failed from a loadlib member to another loadlib member. Fix - now looks for error in stdout in the if statement to use -X option. - * Fixes a bug where files not encoded in IBM-1047 would trigger an - error while computing the record length for a new destination dataset. - * Fixes a bug where the code for fixing an issue with newlines in - files. 
- * fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system to - its initial state in case of a module failure only when force is false. - * fixes a bug where the computed record length for a new destination - dataset would include newline characters. - -* ``zos_job_query`` - - * fixes a bug where a boolean was not being properly compared. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.4.0 -============= - -* Modules - - * ``zos_mount`` can manage mount operations for a - z/OS UNIX System Services (USS) file system data set. - -* Plugins - - * ``zos_ssh`` connection plugin has been removed from this release and is no - longer a dependency for the ``zos_ping`` module. - -* Bug fixes and enhancements - - * Modules - - * ``zos_copy`` - - * introduced an updated creation policy referred to as precedence rules - that if `dest_data_set` is set, it will take precedence. If - `dest` is an empty data set, the empty data set will be written with the - expectation its attributes satisfy the copy. If no precedent rule - has been exercised, `dest` will be created with the same attributes of - `src`. - * introduced new computation capabilities that if `dest` is a nonexistent - data set, the attributes assigned will depend on the type of `src`. If - `src` is a USS file, `dest` will have a Fixed Block (FB) record format - and the remaining attributes will be computed. If `src` is binary, - `dest` will have a Fixed Block (FB) record format with a record length - of 80, block size of 32760, and the remaining attributes will be - computed. 
- * enhanced the force option when `force=true` and the remote file or - data set `dest`` is NOT empty, the `dest` will be deleted and recreated - with the `src` data set attributes, otherwise it will be recreated with - the `dest` data set attributes. - * was enhanced for when `src` is a directory and ends with "/", - the contents of it will be copied into the root of `dest`. It it doesn't - end with "/", the directory itself will be copied. - * option `dest_dataset` has been deprecated and removed in favor - of the new option `dest_data_set`. - * fixes a bug that when a directory is copied from the controller to the - managed node and a mode is set, the mode is applied to the directory - on the managed node. If the directory being copied contains files and - mode is set, mode will only be applied to the files being copied not the - pre-existing files. - * fixes a bug that did not create a data set on the specified volume. - * fixes a bug where a number of attributes were not an option when using - `dest_data_set`. - * fixes a bug where options were not defined in the module - argument spec that will result in error when running `ansible-core` - v2.11 and using options `force` or `mode`. - * was enhanced to support the ``ansible.builtin.ssh`` connection options; - for further reference refer to the `SSH plugin`_ documentation. - * was enhanced to take into account the record length when the - source is a USS file and the destination is a data set with a record - length. This is done by inspecting the destination data set attributes - and using these attributes to create a new data set. - * was updated with the capabilities to define destination data sets from - within the ``zos_copy`` module. In the case where you are copying to - data set destination that does not exist, you can now do so using the - new ``zos_copy`` module option ``destination_dataset``. 
- - * ``zos_operator`` - - * enhanced to allow for MVS operator `SET` command, `SET` is - equivalent to the abbreviated `T` command. - - * ``zos_mount`` fixed option `tag_ccsid` to correctly allow for type int. - - * ``module_utils`` - - * jobs.py - fixes a utility used by module `zos_job_output` that would - truncate the DD content. - - * ``zos_ping`` was enhanced to remove the need for the ``zos_ssh`` - connection plugin dependency. - - * ``zos_fetch`` was enhanced to support the ``ansible.builtin.ssh`` - connection options; for further reference refer to the - `SSH plugin`_ documentation. - - * ``zos_job_output`` - - * was updated to correct possible truncated responses for - the **ddname** content. This would occur for jobs with very large amounts - of content from a **ddname**. - * was enhanced to to include the completion code (CC) for each individual - jop step as part of the ``ret_code`` response. - - * ``zos_job_query`` - - * was enhanced to support a 7 digit job number ID for when there are - greater than 99,999 jobs in the history. - * was enhanced to handle when an invalid job ID or job name is used with - the module and returns a proper response. - - * ``zos_job_submit`` - - * was enhanced to fail fast when a submitted job fails instead of waiting - a predetermined time. - * was enhanced to check for 'JCL ERROR' when jobs are submitted and result - in a proper module response. - - * ``zos_operator_action_query`` response messages were improved with more - diagnostic information in the event an error is encountered. - -* Deprecated or removed - - * ``zos_copy`` module option **destination_dataset** has been renamed to - **dest_data_set**. - * ``zos_ssh`` connection plugin has been removed, it is no longer required. - Remove all playbook references, ie ``connection: ibm.ibm_zos_core.zos_ssh``. - * ``zos_ssh`` connection plugin has been removed, it is no longer required. 
- You must remove the zos_ssh connection plugin from all playbooks that - reference the plugin, for example connection: ibm.ibm_zos_core.zos_ssh. - * ``zos_copy`` module option **model_ds** has been removed. The model_ds logic - is now automatically managed and data sets are either created based on the - ``src`` data set or overridden by the new option ``destination_dataset``. - * ``zos_copy`` and ``zos_fetch`` option **sftp_port** has been deprecated. To - set the SFTP port, use the supported options in the ``ansible.builtin.ssh`` - plugin. Refer to the `SSH port`_ option to configure the port used during - the modules SFTP transport. - -* Documentation - - * Noteworthy documentation updates have been made to: - - * ``zos_copy`` and ``zos_fetch`` about Co:Z SFTP support. - * ``zos_mvs_raw`` removed a duplicate example. - * all action plugins are documented - * update hyperlinks embedded in documentation. - * ``zos_operator`` to explains how to use single quotes in operator commands. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_ -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Known Issues ------------- - -* If a playbook includes the deprecated ``zos_ssh`` connection plugin, for - example ``connection: ibm.ibm_zos_core.zos_ssh``, it will - encounter this error which can corrected by safely removing the plugin: - - .. code-block:: - - "msg": "the connection plugin 'ibm.ibm_zos_core.zos_ssh' was not found" - -* When using the ``zos_ssh`` plugin with **Ansible 2.11** and earlier versions - of this collection, you will encounter the exception: - - .. code-block:: - - AttributeError: module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'. 
- - This is resolved in this release by deprecating the ``zos_ssh`` connection - plugin and removing all ``connection: ibm.ibm_zos_core.zos_ssh`` references - from playbooks. -* When using module ``zos_copy`` and option ``force`` with ansible versions - greater than **Ansbile 2.10** and earlier versions of this collection, an - unsupported option exception would occur. This is resolved in this release. -* When using the ``zos_copy`` or ``zos_fetch`` modules in earlier versions of - this collection without 'passwordless' SSH configured such that you are using - ``--ask-pass`` or passing an ``ansible_password`` in a configuration; during - the playbook execution a second password prompt for SFTP would appear pausing - the playbook execution. This is resolved in this release. -* When using the ``zos_copy`` or ``zos_fetch`` modules, if you tried to use - Ansible connection options such as ``host_key_checking`` or ``port``, they - were not included as part of the modules execution. This is resolved in this - release by ensuring compatibility with the ``ansible.builtin.ssh`` plugin - options. Refer to the `SSH plugin`_ documentation to enable supported options. -* Known issues for modules can be found in the **Notes** section of a modules - documentation. - - -Deprecation Notices -------------------- -Features and functions are marked as deprecated when they are enhanced and an -alternative is available. In most cases, the deprecated item will remain -available unless the deprecated function interferes with the offering. -Deprecated functions are no longer supported, and will be removed in a future -release. - -.. _SSH plugin: - https://docs.ansible.com/ansible/latest/collections/ansible/builtin/ssh_connection.html - -.. 
_SSH port: - https://docs.ansible.com/ansible/latest/collections/ansible/builtin/ssh_connection.html#parameter-port - -Version 1.3.6 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * ``zos_copy`` fixes a bug that when a directory is copied from the - controller to the managed node and a mode is set, the mode is now applied - to the directory on the controller. If the directory being copied contains - files and mode is set, mode will only be applied to the files being copied - not the pre-existing files. - * ``zos_copy`` - fixes a bug where options were not defined in the module - argument spec that will result in error when running `ansible-core` v2.11 - and using options `force` or `mode`. - * ``zos_copy`` - was enhanced for when `src` is a directory and ends with "/", - the contents of it will be copied into the root of `dest`. It it doesn't - end with "/", the directory itself will be copied. - * ``zos_fetch`` - fixes a bug where an option was not defined in the module - argument spec that will result in error when running `ansible-core` v2.11 - and using option `encoding`. - * ``zos_job_submit`` - fixes a bug where an option was not defined in the - module argument spec that will result in error when running - `ansible-core` v2.11 and using option `encoding`. - * ``jobs.py`` - fixes a utility used by module `zos_job_output` that would - truncate the DD content. - * ``zos_ssh`` connection plugin was updated to correct a bug that causes - an `ANSIBLE_SSH_CONTROL_PATH_DIR` attribute error only when using - ansible-core v2.11. 
- -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 - - `IBM Open Enterprise SDK for Python`_ v3.9.5 -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.3.5 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * ``zos_ssh`` connection plugin was updated to correct a bug in Ansible that - would result in playbook task ``retries`` overriding the SSH connection - ``retries``. This is resolved by renaming the ``zos_ssh`` option - ``retries`` to ``reconnection_retries``. The update addresses users of - ``ansible-core`` v2.9 which continues to use ``retries`` and users of - ``ansible-core`` v2.11 or later which uses ``reconnection_retries``. This - also resolves a bug in the connection that referenced a deprecated - constant. - * ``zos_job_output`` fixes a bug that returned all ddname's when a specific - ddname was provided. Now a specific ddname can be returned and all others - ignored. - * ``zos_copy`` fixes a bug that would not copy subdirectories. If the source - is a directory with sub directories, all sub directories will now be copied. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.3.3 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * ``zos_copy`` was updated to correct deletion of all temporary files and - unwarranted deletes. 
- - * When the module would complete, a cleanup routine did not take into - account that other processes had open temporary files and thus would - error when trying to remove them. - * When the module would copy a directory (source) from USS to another - USS directory (destination), any files currently in the destination - would be deleted. - The modules behavior has changed such that files are no longer deleted - unless the ``force`` option is set to ``true``. When ``force=true``, - copying files or a directory to a USS destination will continue if it - encounters existing files or directories and overwrite any - corresponding files. - * ``zos_job_query`` was updated to correct a boolean condition that always - evaluated to "CANCELLED". - - * When querying jobs that are either **CANCELLED** or have **FAILED**, - they were always treated as **CANCELLED**. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.3.1 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * Connection plugin ``zos_ssh`` was updated to prioritize the execution of - modules written in REXX over other implementations such is the case for - ``zos_ping``. - * ``zos_ping`` was updated to support Automation Hub documentation - generation. 
- -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Known issues ------------- - -* Modules - - * When executing programs using ``zos_mvs_raw``, you may encounter errors - that originate in the implementation of the programs. Two such known issues - are noted below of which one has been addressed with an APAR. - - #. ``zos_mvs_raw`` module execution fails when invoking - Database Image Copy 2 Utility or Database Recovery Utility in conjunction - with FlashCopy or Fast Replication. - #. ``zos_mvs_raw`` module execution fails when invoking DFSRRC00 with parm - "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is - addressed by APAR PH28089. - -Version 1.3.0 -============= - -What's New ----------- - -* Modules - - * ``zos_apf`` - Add or remove libraries to and from Authorized Program Facility (APF). - * ``zos_backup_restore`` - Backup and restore data sets and volumes. - * ``zos_blockinfile`` - Manage block of multi-line textual data on z/OS. - * ``zos_find`` - Find matching data sets. - * ``zos_data_set`` - added support to allocate and format zFS data sets - * ``zos_operator`` - supports new options **wait** and **wait_time_s** such - that you can specify that ``zos_operator`` wait the full **wait_time_s** or - return as soon as the first operator command executes. - * All modules support relative paths and remove choice case sensitivity. - -* Bug Fixes - - * Modules - - * Action plugin ``zos_copy`` was updated to support Python 2.7. - * Module ``zos_copy`` was updated to fail gracefully when a it - encounters a non-zero return code. 
- * Module ``zos_copy`` was updated to support copying data set members that - are program objects to a PDSE. Prior to this update, copying data set - members would yield an error: - **FSUM8976 Error writing <src_data_set_member> to PDSE member - <dest_data_set_member>** - * Job utility is an internal library used by several modules. It has been - updated to use a custom written parsing routine capable of handling - special characters to prevent job related reading operations from failing - when a special character is encountered. - * Module ``zos_job_submit`` was updated to remove all trailing **\r** from - jobs that are submitted from the controller. - * Module ``zos_job_submit`` referenced a non-existent option and was - corrected to **wait_time_s**. - * Module ``zos_tso_command`` support was added for when the command output - contained special characters. - - * Playbooks - - * Playbook `zos_operator_basics.yaml`_ - has been updated to use `end` in the WTO reply over the previous use of - `cancel`. Using `cancel` is not a valid reply and results in an execution - error. - -* Playbooks - - * In each release, we continue to expand on use cases and deliver them as - playbooks in the `playbook repository`_ that can be easily tailored to any - system. - - * Authorize and - `synchronize APF authorized libraries on z/OS from a configuration file cloned from GitHub`_ - * Automate program execution with - `copy, sort and fetch data sets on z/OS playbook`_. - * Automate user management with add, remove, grant permission, - generate passwords, create zFS, mount zFS and send email - notifications when deployed to Ansible Tower or AWX with the - `manage z/OS Users Using Ansible`_ playbook. - * Use the `configure Python and ZOAU Installation`_ playbook to scan the - **z/OS** target to find the latest supported configuration and generate - `inventory`_ and a `variables`_ configuration. 
- * Automate software management with `SMP/E Playbooks`_ - * All playbooks have been updated to use our temporary data set feature - to avoid any concurrent data set name problems. - * In the prior release, all sample playbooks previously included with the - collection were migrated to the `playbook repository`_. The - `playbook repository`_ categorizes playbooks into **z/OS concepts** and - **topics**, it also covers `playbook configuration`_ as well as provide - additional community content such as **blogs** and where to open - `support tickets`_ for the playbooks. - -* Documentation - - * All documentation related to `playbook configuration`_ has been - migrated to the `playbook repository`_. Each playbook contains a README - that explains what configurations must be made to run a sample playbook. - * We have been carefully reviewing our users feedback and over time we have - compiled a list of information that we feel would help everyone and have - released this information in our new `FAQs`_. - * Learn about the latest features and experience them before you try - them through the blogs that discuss playbooks, modules, and use cases: - - * `Running Batch Jobs on z/OS using Ansible`_ details how - to write and execute batch jobs without having to deal with JCL. - - * `z/OS User Management With Ansible`_ explains all about the user management - playbook and its optional integration into AWX. - -Availability ------------- - -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Known issues ------------- - -* Modules - - * When executing programs using ``zos_mvs_raw``, you may encounter errors - that originate in the implementation of the programs. 
Two such known issues - are noted below of which one has been addressed with an APAR. - - #. ``zos_mvs_raw`` module execution fails when invoking - Database Image Copy 2 Utility or Database Recovery Utility in conjunction - with FlashCopy or Fast Replication. - #. ``zos_mvs_raw`` module execution fails when invoking DFSRRC00 with parm - "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is - addressed by APAR PH28089. - .. ............................................................................. .. Global Links .. ............................................................................. From 9cf3fd0f1bf6ae5a882b3bdf32dfd50294148687 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 10 May 2024 22:34:45 -0700 Subject: [PATCH 377/413] Fix module format bug Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_blockinfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 88f410cdb..146cacd61 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -456,7 +456,7 @@ def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=Non cmd = "dmod -b {0} {1} {2} {3}".format(force, encoding, marker, opts) else: - cmd = """dmod -b {0} {1} {2} "//d" {4}""".format(force, encoding, marker, src) + cmd = """dmod -b {0} {1} {2} {3}""".format(force, encoding, marker, src) rc, stdout, stderr = module.run_command(cmd) cmd = clean_command(cmd) From 357706f208afd8be691f967c02b8f9816be042c6 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 11 May 2024 22:31:11 -0700 Subject: [PATCH 378/413] Update the test case to match the expected size Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_mvs_raw_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py 
b/tests/functional/modules/test_zos_mvs_raw_func.py index cbddd4419..aa01ed952 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -271,7 +271,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch ("cyl", 3, 1, 2549880), ("b", 3, 1, 56664), ("k", 3, 1, 56664), - ("m", 3, 1, 3003192), + ("m", 3, 1, 2889864), ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): From 9ed6077aa97b1cf9d3c1552564103c8fbb3548a7 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 14 May 2024 15:44:01 -0700 Subject: [PATCH 379/413] RST updats so that the support matrix can be publushed to the unified docs Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/index.rst | 11 ++-- docs/source/reference/community.rst | 17 ++++++ docs/source/reference/documentation.rst | 18 +++++++ docs/source/release_notes.rst | 3 ++ .../source/resources/releases_maintenance.rst | 52 +++++++++++++++++++ docs/source/resources/resources.rst | 3 +- 6 files changed, 97 insertions(+), 7 deletions(-) create mode 100644 docs/source/reference/community.rst create mode 100644 docs/source/reference/documentation.rst create mode 100644 docs/source/resources/releases_maintenance.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index c150d27c0..7cea15c03 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -90,10 +90,9 @@ ansible-doc to automate tasks on z/OS. community_guides license +.. toctree:: + :maxdepth: 1 + :caption: Reference - - - - - - + reference/documentation + reference/community diff --git a/docs/source/reference/community.rst b/docs/source/reference/community.rst new file mode 100644 index 000000000..9c09aeeab --- /dev/null +++ b/docs/source/reference/community.rst @@ -0,0 +1,17 @@ +.. ........................................................................... +.. © Copyright IBM Corporation 2020, 2021 . +.. 
........................................................................... + +============ +Contributing +============ + +Contributing to collections as a member of the open source community varies for +each collection. Although the collections come together as a unified solution, +each offering operates on its own; therefore, review the individual collections to learn +how to contribute. + +.. toctree:: + :maxdepth: 1 + + z/OS core </../community_guides> \ No newline at end of file diff --git a/docs/source/reference/documentation.rst b/docs/source/reference/documentation.rst new file mode 100644 index 000000000..9e16806b3 --- /dev/null +++ b/docs/source/reference/documentation.rst @@ -0,0 +1,18 @@ +.. ........................................................................... +.. © Copyright IBM Corporation 2024 . +.. ........................................................................... + +============= +Documentation +============= + +In addition to the common reference material included in Helpful Links, +each collection in the **Red Hat® Ansible Certified Content for IBM Z** +includes supplementary documentation specific to the collection. Examples of +such documentation include Web Services APIs, guidelines for development and +testing the modules, offering-specific reading, etc. + +.. toctree:: + :maxdepth: 1 + + z/OS core <../resources/resources> diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 8508178ee..751c8c337 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -39,6 +39,9 @@ Bugfixes Porting Guide ------------- +This section discusses the behavioral changes between ``ibm_zos_core`` v1.9.0 and ``ibm_zos_core`` v1.10.0-beta.1. +It is intended to assist in updating your playbooks so this collection will continue to work. + - ``zos_archive`` - option **terse_pack** no longer accepts uppercase choices, users should replace them with lowercase ones. 
diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst new file mode 100644 index 000000000..f304cba93 --- /dev/null +++ b/docs/source/resources/releases_maintenance.rst @@ -0,0 +1,52 @@ +======================== +Releases and maintenance +======================== + +This table describes the collection's release dates, dependency versions and End of Life dates (EOL). + +The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally each quarter +a beta is released followed by a GA version. We can extend this cycle to properly implement and test larger +changes before a new release is made available. + +These are the component versions available when the collection is made generally available. The underlying +component version is likely to change as they reach EOL, thus components must be a version that is +currently supported. + +For example, if a collection releases with a minimum version of ``ansible-core`` of 2.11.0, and later this +enters into EOL, then a newer version of ansible-core must be used. 
+ +Support Matrix +============== ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ +| Version | Released | ansible-core | Ansible | AAP | End of Life | Control Node | Managed Node | ++=========+===============+==============+=========+=======+===============+====================+============================================================================+ +| 1.10.x | 16 May 2024 | >=2.15.x | >=8.0.x | >=2.4 | 16 May 2026 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or later | ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ +| 1.9.x | 05 Feb 2024 | >=2.14.x | >=7.0.x | >=2.3 | 05 Feb 2026 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to version 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ +| 1.8.x | 13 Dec 2023 | >=2.14.x | >=7.0.x | >=2.3 | 13 Dec 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to version 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ +| 1.7.x | 10 Oct 2023 | >=2.14.x | >=7.0.x | >=2.3 | 10 Oct 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - 
V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to version 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ +| 1.6.x | 28 June 2023 | >=2.14.x | >=7.0.x | >=2.3 | 28 June 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to version 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ +| 1.5.x | 25 April 2023 | >=2.14.x | >=7.0.x | >=2.3 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to version 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ \ No newline at end of file diff --git a/docs/source/resources/resources.rst b/docs/source/resources/resources.rst index 8b5951948..8bdb16a6c 100644 --- a/docs/source/resources/resources.rst +++ b/docs/source/resources/resources.rst @@ -1,5 +1,5 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021 . +.. © Copyright IBM Corporation 2024 . .. ........................................................................... 
========= @@ -10,3 +10,4 @@ Resources :maxdepth: 1 character_set + releases_maintenance From 6d7e49cf636ec53bee999a8b40f31ad8923e36e1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 14 May 2024 15:48:40 -0700 Subject: [PATCH 380/413] Updated README Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 268 +++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 185 insertions(+), 83 deletions(-) diff --git a/README.md b/README.md index b2345c118..574832333 100644 --- a/README.md +++ b/README.md @@ -1,85 +1,187 @@ -IBM z/OS core collection -======================== - -The **IBM® z/OS® core collection**, also represented as -**ibm_zos_core** in this document, is part of the broader -initiative to bring Ansible Automation to IBM Z® through the offering -**Red Hat® Ansible Certified Content for IBM Z®**. The -**IBM z/OS core collection** supports automation tasks such as -creating data sets, submitting jobs, querying jobs, retrieving job output, -encoding data, fetching data sets, copying data sets, -executing operator commands, executing TSO commands, ping, -querying operator actions, APF authorizing libraries, -editing textual data in data sets or Unix System Services files, -finding data sets, backing up and restoring data sets and -volumes, mounting file systems, running z/OS programs without JCL, -running local and remote scripts on z/OS, initializing volumes, -archiving, unarchiving and templating with Jinja. - - -Red Hat Ansible Certified Content for IBM Z -=========================================== - -**Red Hat® Ansible Certified Content for IBM Z** provides the ability to -connect IBM Z® to clients' wider enterprise automation strategy through the -Ansible Automation Platform ecosystem. This enables development and operations -automation on Z through a seamless, unified workflow orchestration with -configuration management, provisioning, and application deployment in -one easy-to-use platform. 
- -The **IBM z/OS core collection** is following the -**Red Hat® Ansible Certified Content for IBM Z®** method of distributing -content. Collections will be developed in the open, and when content is ready -for use, it is released to -[Ansible Galaxy](https://galaxy.ansible.com/ui/) -for community adoption. Once contributors review community usage, feedback, -and are satisfied with the content published, the collection will then be -released to [Ansible Automation Hub](https://www.ansible.com/products/automation-hub) -as **certified** and **IBM supported** for -**Red Hat® Ansible Automation Platform subscribers**. - -For guides and reference, please review the [documentation](https://ibm.github.io/z_ansible_collections_doc/index.html). - -Features -======== -The **IBM z/OS core collection**, includes -[connection plugins](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/plugins.html#connection), -[action plugins](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/plugins.html#action), -[modules](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/modules.html), -[filters](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/filters.html), -and ansible-doc to automate tasks on z/OS. - -Ansible version compatibility -============================= -This collection has been tested against **Ansible Core** versions >=2.15. -The Ansible Core versions supported for this collection align to the -[ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the -[Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** -and **ansible-core**. 
- -For **Ansible Automation Platform** (AAP) users, review the -[Ansible Automation Platform Certified Content](https://access.redhat.com/support/articles/ansible-automation-platform-certified-content) -and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) -for more more information on supported versions of Ansible. - -Other Dependencies -================== -This release of the **IBM z/OS core collection** requires the z/OS managed node have the following: -- [z/OS](https://www.ibm.com/docs/en/zos) -- [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). -- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) -- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) -For specific dependency versions, please review the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) for the version of the IBM Ansible z/OS core installed. - -Copyright -========= -© Copyright IBM Corporation 2020-2024. - -License -======= -Some portions of this collection are licensed under [GNU General Public -License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and -other portions of this collection are licensed under [Apache License, -Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). +# IBM® z/OS® core collection +The **IBM® z/OS® core collection** enables Ansible to interact with z/OS Data Sets and USS files. The collection +focuses on operating system fundamental operations such as managing encodings, creating data sets and submitting +jobs. +### Description +The **IBM® z/OS® core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible Automation to IBM Z®. 
This collection brings forward the possibility to manage batch jobs, program authorizations, operator operations and execute both JES and MVS commands as well as execute shell, python and REXX scripts. It supports data set creation, searching, copying, fetching and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. + +System programmers can enable pipelines to set up, tear down and deploy applications while system administrators can automate time-consuming repetitive tasks, inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. + +## Requirements +Before you install the IBM z/OS core collection, you must configure a control node and managed node with a minimum set of requirements. +The following [table](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html) details the specific software requirements for the controller and managed node. + +### Ansible Controller +* This release of the collection requires **ansible-core >=2.15** (Ansible >=8.x), for additional requirements such as Python, review the [support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). + +### Managed Node +This release of the collection requires the following: +* [z/OS](https://www.ibm.com/docs/en/zos) V2R4 (or later) but prior to version V3R1. +* [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). +* [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. +* [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau) 1.3.0 or later. 
+ +### Installation +Before using this collection, you need to install it with the Ansible Galaxy command-line tool: + +``` +ansible-galaxy collection install ibm.ibm_zos_core +``` + +You can also include it in a requirements.yml file and install it with ansible-galaxy collection install -r requirements.yml, using the format: + +``` +collections: + - name: ibm.ibm_zos_core +``` + +Note that if you install the collection from Ansible Galaxy, it will not be upgraded automatically when you upgrade the Ansible package. + +To upgrade the collection to the latest available version, run the following command: + +``` +ansible-galaxy collection install ibm.ibm_zos_core --upgrade +``` + +You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0: + +``` +ansible-galaxy collection install ibm.ibm_zos_core:1.0.0 +``` + +You can also install a beta version of the collection. A Beta version is only available on Galaxy and is only supported by community until it is promoted to Ansible Automation Platform. Use the following syntax to install a beta version: + +``` +ansible-galaxy collection install ibm.ibm_zos_core:1.10.0-beta.1 +``` + +As part of the installation, the collection [requirements](#Requirements) must be made available to Ansible through the use of [environment variables](https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/zos_core/configuration_guide.md#environment-variables). 
The preferred configuration is to place the environment variables in `group_vars` and `host_vars`, you can find examples of this configuration under any [playbook project](https://github.com/IBM/z_ansible_collections_samples), for example, review the **data set** example [configuration](https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_concepts/data_sets/data_set_basics#configuration) documentation. + +If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed [here](https://github.com/ansible-collections/ibm_zos_core/discussions/657). + +The environment variables: +``` +PYZ: "path_to_python_installation_on_zos_target" +ZOAU: "path_to_zoau_installation_on_zos_target" + +ansible_python_interpreter: "{{ PYZ }}/bin/python3" + +environment_vars: + _BPXK_AUTOCVT: "ON" + ZOAU_HOME: "{{ ZOAU }}" + PYTHONPATH: "{{ ZOAU }}/lib" + LIBPATH: "{{ ZOAU }}/lib:{{ PYZ }}/lib:/lib:/usr/lib:." + PATH: "{{ ZOAU }}/bin:{{ PYZ }}/bin:/bin:/var/bin" + _CEE_RUNOPTS: "FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)" + _TAG_REDIR_ERR: "txt" + _TAG_REDIR_IN: "txt" + _TAG_REDIR_OUT: "txt" + LANG: "C" + PYTHONSTDINENCODING: "cp1047" +``` + +## Use Cases +This section should outline in detail 3-5 common use cases for the collection. These should be informative examples of how the collection has been used, or how you'd like to see it be used. + +* Use Case Name: Add a new z/OS User + * Actors: + * Application Developer + * Description: + * An application developer can submit a new user request for the system admin to approve. 
+ * Flow: + * Verify user does not exist; create home directory, password and passphrase + * Create home directory and the user to the system + * Provide access to resource, add to system groups and define an alias + * Create the users ISPROF data set + * Create user private data set, mount with persistance + * Generate email with login credentials +* Use Case Name: Automate certificate renewals + * Actors: + * System Admin + * Description: + * The system admin can automate certificate renewals, no longer requiring manual intervention. + * Flow: + * Setup and configure and run z/OS Health Checker to generate a report + * Search the Health Checker report for expiring certificates + * Renew expiring certificates + * Collect expiring certificate attributes and backup certificate + * Replicate certificate with a new label + * Generate signing request and sign new certificate + * Supersede the old with the new certificate + * Delete old certificate and relabel new certificate with previous certificate name +* Use Case Name: Provision a Liberty Profile Instance + * Actors: + * Application Developer + * Description: + * An application developer can provision an application runtime that accelerates the delivery of cloud-native applications, + * Flow: + * Create and mount a file system for the Liberty profile. + * Creating a Liberty Profile instance with optional configurations. + * Enabling z/OS authorized services for the Liberty profile. + * Starting an angel process or a server process + +### Testing +All releases, including beta's will have: +* 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. +* 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of [ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests). 
+* 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst) analysis +* 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/) allowing only false positives. + +### Environments +This release of the collection was tested with: +* ansible-core v2.15.x +* Python 3.9.x +* IBM Open Enterprise SDK for Python 3.11.x +* IBM Z Open Automation Utilities (ZOAU) 1.3.0.x +* z/OS V2R5 + +### Known Exceptions +This release of the collection has no known exceptions or workarounds, but this release does introduce case sensitivity for option values. This release includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values should be migrated to ensure case sensitivity does not affect a module. + +## Contributing +This community is not currently accepting contributions. However, we encourage you to open [git issues](https://github.com/ansible-collections/ibm_zos_core/issues) for bugs, comments or feature requests and check back periodically for when community contributions will be accepted in the near future. + +Review the [development docs](https://ibm.github.io/z_ansible_collections_doc/zhmc-ansible-modules/docs/source/development.html#development) to learn how you can create an environment and test the collection's modules. + +### Communicating with the IBM z/OS core community +If you would like to communicate with this community, you can do so through: +* GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). +* GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). +* [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. +* Discord [System Z Enthusiasts](https://forum.ansible.com/) room [Ansible](https://discord.gg/nKC8F89v). +* Matrix Ansible room [Ansible z/OS](#ansible-zos:matrix.org). 
+* Ansible community [Matrix rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). + +## Support +As Red Hat Ansible [Certified Content](https://catalog.redhat.com/software/search?target_platforms=Red%20Hat%20Ansible%20Automation%20Platform), this collection is entitled to [support](https://access.redhat.com/support/) through [Ansible Automation platform](https://www.redhat.com/en/technologies/management/ansible) (AAP). After creating a Red Hat support case, if it is decided the issue belongs to IBM, Red Hat will ask that [an IBM support case](https://www.ibm.com/mysupport/s/createrecord/NewCase) be created and share the case number with Red Hat so that a collaboration can begin between Red Hat and IBM. + +If a support case can not be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible). + +The current supported versions of this collection can be found listed under the [release section](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + +## Release Notes and Roadmap +The collections cumulative release notes can be reviewed [here](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). Note, some collections release before an ansible-core version reaches End of Life (EOL), thus the version of ansible-core that is supported must be a version that is currently supported. 
+ +For AAP users, to see the supported ansible-core versions, review the [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform). + +For Galaxy and GitHub users, to see the supported ansible-core versions, review the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). + +The collections changelogs can be reviewed in the following table. +| Version | ansible-core | Ansible | Status | +|---------|--------------|---------|----------------------------| +| 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| +| 1.10.x | >=2.15.x | >=8.0.x | 16 May 2024 | +| [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 | +| [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 | +| [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 | +| [1.6.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 28 June 2023 | +| [1.5.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 25 April 2023 | + +## Related Information +Example playbooks and use cases can be found in the [z/OS playbook repository](https://github.com/IBM/z_ansible_collections_samples). +Supplemental content on getting started with Ansible, architecture and use cases is available [here](https://ibm.github.io/z_ansible_collections_doc/reference/helpful_links.html). + +## License Information +Some portions of this collection are licensed under [GNU General Public License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and other portions of this collection are licensed under [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). 
See individual files for applicable licenses. \ No newline at end of file From 22a7a03feef641e42144e4953bfe9fc57e614f03 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 14 May 2024 15:52:48 -0700 Subject: [PATCH 381/413] Added Readme changelog link Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 574832333..e8f8f2879 100644 --- a/README.md +++ b/README.md @@ -171,7 +171,7 @@ The collections changelogs can be reviewed in the following table. | Version | ansible-core | Ansible | Status | |---------|--------------|---------|----------------------------| | 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| -| 1.10.x | >=2.15.x | >=8.0.x | 16 May 2024 | +| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | May 2024 | | [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 | | [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 | | [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 | From 0b12772164500e700e6357a4b7b1cceda3ef7589 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 14 May 2024 16:49:48 -0700 Subject: [PATCH 382/413] Delete changelog fragements since changelog has been generated Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 101 +++++++++++++++-- .../fragments/1032-clean-job_submit-test.yml | 3 - ...-lineinfile-remove-zos_copy-dependency.yml | 3 - .../1156-zos_archive-remove-zos_copy_dep.yml | 3 - ...-remove-zos-copy-from-zos-encode-tests.yml | 3 - ...165-remove-zos-copy-dep-from-zos-fetch.yml | 3 - ...ve-zos-copy-from-zos-blockinfile-tests.yml | 3 - .../1169-util-job-zoau-migration.yml | 3 - 
...e-zos_encode-from_zos_lineinfile-tests.yml | 3 - .../1181-zoau-migration-zos_operator.yml | 4 - .../1182-migrate-module-utils-data-set.yml | 3 - changelogs/fragments/1183-copy-members.yml | 3 - ...184-remove-zos-fetch-dep-from-zos-copy.yml | 3 - .../1187-migrate-module-utils-copy.yml | 3 - .../1188-migrate-module_utils-backup.yml | 3 - .../1189-migrate-module_utils-encode.yml | 3 - ...1190-migrate-module_utils-dd_statement.yml | 3 - .../1196-zoau-migration-zos_gather_facts.yml | 4 - .../1202-doc-gen-script-portability.yml | 4 - changelogs/fragments/1204-migrate-zos_apf.yml | 12 -- .../1209-zoau-migration-zos_job_submit.yml | 3 - ...1215-Migrate_zos_operator_action_query.yml | 4 - ...lidate_module_zos_job_output_migration.yml | 3 - .../fragments/1217-validate-job-query.yml | 3 - .../fragments/1218-migrate-zos_encode.yml | 3 - ...20-bugfix-zos_job_submit-default_value.yml | 4 - .../1222-zoau-migration-zos_copy.yml | 3 - .../fragments/1227-migrate-zos_archive.yml | 3 - ...228-zos_find-remove-zos_lineinfile_dep.yml | 3 - .../fragments/1229-migrate-zos_fetch.yml | 3 - .../fragments/1237-migrate-zos_mount.yml | 4 - .../fragments/1238-migrate-zos_unarchive.yml | 3 - .../1242-zoau-migration-zos_data_set.yml | 3 - ...Migrate_zos_blockinfile_and_lineinfile.yml | 4 - .../fragments/1257-zoau-import-zos_apf.yml | 3 - .../1261-job-submit-non-utf8-chars.yml | 9 -- .../1265_Migrate_zos_backup_restore.yml | 7 -- ...0-quick-fix-len-of-volumes-work-around.yml | 5 - ...-update-zos_archive-zos_unarchive-docs.yml | 5 - .../1292-doc-zos_tso_command-example.yml | 4 - .../fragments/1295-doc-zos_ping-scp.yml | 7 -- ...98-Remove_local_charset_from_zos_fetch.yml | 3 - .../fragments/1307-update-sanity-zos_copy.yml | 10 -- .../1320-Zos_mvs_raw_ignores_tmp_hlq.yml | 5 - .../1322-update-docstring-encode.yml | 3 - .../1331-update-docstring-ickdsf.yml | 3 - .../1332-update-docstring-import_handler.yml | 3 - .../fragments/1333-update-docstring-job.yml | 3 - 
.../1336-update-docstring-validation.yml | 3 - .../1340-Work_around_fix_false_positive.yml | 4 - ...re_than_0_doesn_not_put_change_as_true.yml | 5 - .../1347-update-docstring-zos_data_set.yml | 3 - .../1348-update-docstring-zos_encode.yml | 3 - .../1349-update-docstring-zos_fetch.yml | 3 - .../1350-update-docstring-zos_find.yml | 3 - ...1351-update-docstring-zos_gather_facts.yml | 3 - .../1352-update-docstring-zos_job_output.yml | 3 - .../1353-update-docstring-zos_job_query.yml | 3 - .../1354-update-docstring-zos_job_submit.yml | 3 - .../1355-update-docstring-zos_lineinfile.yml | 3 - .../1356-update-docstring-zos_mount.yml | 3 - .../fragments/1388-lowercase-choices.yml | 106 ------------------ .../1390-update-docstring-zos_script.yml | 3 - .../1391-update-docstring-zos_tso_command.yml | 3 - .../1392-update-docstring-zos_volume_init.yml | 3 - .../1393-update-docstring-zos_apf.yml | 3 - ...te_docstring-zos_operator_action_query.yml | 3 - .../fragments/1443-zos_find-filter-size.yml | 4 - .../fragments/692-changelog-lint-ac-tool.yml | 8 -- .../971-bug-job_submit-can-stacktrace.yml | 6 - changelogs/fragments/992-fix-sanity4to6.yml | 7 -- .../fragments/v1.10.0-beta.1_summary.yml | 6 - 72 files changed, 89 insertions(+), 391 deletions(-) delete mode 100644 changelogs/fragments/1032-clean-job_submit-test.yml delete mode 100644 changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml delete mode 100644 changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml delete mode 100644 changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml delete mode 100644 changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml delete mode 100644 changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml delete mode 100644 changelogs/fragments/1169-util-job-zoau-migration.yml delete mode 100644 changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml delete mode 100644 changelogs/fragments/1181-zoau-migration-zos_operator.yml 
delete mode 100644 changelogs/fragments/1182-migrate-module-utils-data-set.yml delete mode 100644 changelogs/fragments/1183-copy-members.yml delete mode 100644 changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml delete mode 100644 changelogs/fragments/1187-migrate-module-utils-copy.yml delete mode 100644 changelogs/fragments/1188-migrate-module_utils-backup.yml delete mode 100644 changelogs/fragments/1189-migrate-module_utils-encode.yml delete mode 100644 changelogs/fragments/1190-migrate-module_utils-dd_statement.yml delete mode 100644 changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml delete mode 100644 changelogs/fragments/1202-doc-gen-script-portability.yml delete mode 100644 changelogs/fragments/1204-migrate-zos_apf.yml delete mode 100644 changelogs/fragments/1209-zoau-migration-zos_job_submit.yml delete mode 100644 changelogs/fragments/1215-Migrate_zos_operator_action_query.yml delete mode 100644 changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml delete mode 100644 changelogs/fragments/1217-validate-job-query.yml delete mode 100644 changelogs/fragments/1218-migrate-zos_encode.yml delete mode 100644 changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml delete mode 100644 changelogs/fragments/1222-zoau-migration-zos_copy.yml delete mode 100644 changelogs/fragments/1227-migrate-zos_archive.yml delete mode 100644 changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml delete mode 100644 changelogs/fragments/1229-migrate-zos_fetch.yml delete mode 100644 changelogs/fragments/1237-migrate-zos_mount.yml delete mode 100644 changelogs/fragments/1238-migrate-zos_unarchive.yml delete mode 100644 changelogs/fragments/1242-zoau-migration-zos_data_set.yml delete mode 100644 changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml delete mode 100644 changelogs/fragments/1257-zoau-import-zos_apf.yml delete mode 100644 changelogs/fragments/1261-job-submit-non-utf8-chars.yml delete mode 100644 
changelogs/fragments/1265_Migrate_zos_backup_restore.yml delete mode 100644 changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml delete mode 100644 changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml delete mode 100644 changelogs/fragments/1292-doc-zos_tso_command-example.yml delete mode 100644 changelogs/fragments/1295-doc-zos_ping-scp.yml delete mode 100644 changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml delete mode 100644 changelogs/fragments/1307-update-sanity-zos_copy.yml delete mode 100644 changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml delete mode 100644 changelogs/fragments/1322-update-docstring-encode.yml delete mode 100644 changelogs/fragments/1331-update-docstring-ickdsf.yml delete mode 100644 changelogs/fragments/1332-update-docstring-import_handler.yml delete mode 100644 changelogs/fragments/1333-update-docstring-job.yml delete mode 100644 changelogs/fragments/1336-update-docstring-validation.yml delete mode 100644 changelogs/fragments/1340-Work_around_fix_false_positive.yml delete mode 100644 changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml delete mode 100644 changelogs/fragments/1347-update-docstring-zos_data_set.yml delete mode 100644 changelogs/fragments/1348-update-docstring-zos_encode.yml delete mode 100644 changelogs/fragments/1349-update-docstring-zos_fetch.yml delete mode 100644 changelogs/fragments/1350-update-docstring-zos_find.yml delete mode 100644 changelogs/fragments/1351-update-docstring-zos_gather_facts.yml delete mode 100644 changelogs/fragments/1352-update-docstring-zos_job_output.yml delete mode 100644 changelogs/fragments/1353-update-docstring-zos_job_query.yml delete mode 100644 changelogs/fragments/1354-update-docstring-zos_job_submit.yml delete mode 100644 changelogs/fragments/1355-update-docstring-zos_lineinfile.yml delete mode 100644 changelogs/fragments/1356-update-docstring-zos_mount.yml delete mode 100644 
changelogs/fragments/1388-lowercase-choices.yml delete mode 100644 changelogs/fragments/1390-update-docstring-zos_script.yml delete mode 100644 changelogs/fragments/1391-update-docstring-zos_tso_command.yml delete mode 100644 changelogs/fragments/1392-update-docstring-zos_volume_init.yml delete mode 100644 changelogs/fragments/1393-update-docstring-zos_apf.yml delete mode 100644 changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml delete mode 100644 changelogs/fragments/1443-zos_find-filter-size.yml delete mode 100644 changelogs/fragments/692-changelog-lint-ac-tool.yml delete mode 100644 changelogs/fragments/971-bug-job_submit-can-stacktrace.yml delete mode 100644 changelogs/fragments/992-fix-sanity4to6.yml delete mode 100644 changelogs/fragments/v1.10.0-beta.1_summary.yml diff --git a/ac b/ac index 9aee6a02d..0f5bc07b4 100755 --- a/ac +++ b/ac @@ -34,6 +34,7 @@ normalize_version() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; } +# Method determines the lastest (highest number) version venv that is managed by ./ac latest_venv(){ dir_version_latest="0" test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` @@ -48,6 +49,33 @@ latest_venv(){ fi } +# Method will take a venv name such as venv-2.16 and validate that it exists +validate_venv(){ + option_venv=$1 + #test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` + + if [[ "$option_venv" =~ "latest" ]]; then + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-latest* 2>/dev/null` + if [[ "$test_for_managed_venv" =~ "latest" ]]; then + dir_version_latest=$option_venv + fi + #elif [ ! -z "$test_for_managed_venv" ]; then + else + for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | rev | cut -d"/" -f1| rev`; do + if [ $dir_version == $option_venv ]; then + dir_version_latest=$dir_version + fi + done + fi + + if [ ! 
-z "$dir_version_latest" ]; then + echo "${VENV_HOME_MANAGED}"/$dir_version_latest + else + echo "Unable to validate managed venv option $option_venv, exiting." + exit + fi +} + VENV=`latest_venv` file="" @@ -303,6 +331,18 @@ ac_install(){ fi } +# Run a make module doc +# ------------------------------------------------------------------------------ +#->ac-module-doc: +## Runs make module-doc to generate the module documentation +## Usage: ac [--ac-module-doc] +## Example: +## $ ac --ac-module-doc +ac_module_doc(){ + message "Running make module-doc" + . $VENV_BIN/activate && cd docs/ && make module-doc +} + # ------------------------------------------------------------------------------ # Run ansible-lint on the locally checked out GH Branch # ------------------------------------------------------------------------------ @@ -635,11 +675,24 @@ venv_setup(){ # TODO: Allow user to specify which venv they can start # ------------------------------------------------------------------------------ #->venv-start: -## Activate the lastest ansible managed virtual environment. -## Usage: ac [--venv-start] +## Activate the latest ansible managed virtual environment or optionally start +## by its name. +## Usage: ac [--venv-start --name <venv name>] ## Example: +## $ ac --venv-start --name venv-2.16 ## $ ac --venv-start venv_start(){ + option_name=$1 + + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + + if [ ! -z "$VENV" ]; then + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` + fi + fi + message "Starting managed python virtual environment: $VENV_BASENAME" #. $VENV_BIN/activate; exec /bin/sh -i /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i" @@ -650,14 +703,27 @@ venv_start(){ # TODO: Allow user to specify which venv they can stop # ------------------------------------------------------------------------------ #->venv-stop: -## Deactivate the lastest ansible managed virtual environment. 
-## Usage: ac [--venv-stop] +## Deactivate the latest ansible managed virtual environment or optionally deactivate +## by its name. +## Usage: ac [--venv-stop --name <venv name>]] ## Example: +## $ ac --venv-stop --name venv-2.16 ## $ ac --venv-stop venv_stop(){ + option_name=$1 + + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + + if [ ! -z "$VENV" ]; then + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` + fi + fi + message "Stopping managed ansible virtual environment located at: $VENV_BASENAME" message "ac --venv-stop does not actually currently work, use CNTL-D" - . deactivate $VENV_BASENAME; + . deactivate $VENV_BASENAME 2>/dev/null; } # ============================================================================== @@ -691,14 +757,18 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; - --ac-galaxy-importer) # Command + --ac-galaxy-importer) # Command ensure_managed_venv_exists $1 option_submitted="--ac-galaxy-importer" ;; - --ac-changelog) # Command + --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" ;; + --ac-module-doc) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-module-doc" + ;; --ac-install) ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" @@ -762,7 +832,7 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--venv-stop" ;; - --command|--command=?*) # option + --command|--command=?*) # option command=`option_processor $1 $2` option_sanitize $command shift @@ -777,7 +847,7 @@ while true; do option_sanitize $file shift ;; - --host|--host=?*) # option + --host|--host=?*) # option host=`option_processor $1 $2` option_sanitize $host shift @@ -787,6 +857,11 @@ while true; do option_sanitize $level shift ;; + --name|--name=?*) # option + name=`option_processor $1 $2` + option_sanitize $name + shift + ;; --out-file|--out-file=?*) # option out_file=`option_processor $1 $2` option_sanitize $out_file @@ -855,6 +930,8 
@@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then ac_changelog $command +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-module-doc" ] ; then + ac_module_doc elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then @@ -882,7 +959,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then venv_setup $password elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then - venv_start + venv_start $name elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-stop" ] ; then - venv_stop -fi + venv_stop $name +fi \ No newline at end of file diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml deleted file mode 100644 index bb4248aec..000000000 --- a/changelogs/fragments/1032-clean-job_submit-test.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. - (https://github.com/ansible-collections/ibm_zos_core/pull/1364). diff --git a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml deleted file mode 100644 index 44015bbd9..000000000 --- a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - remove zos_copy calls from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1152). 
diff --git a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml deleted file mode 100644 index ea8aacee9..000000000 --- a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - Remove zos_copy dependency from zos_archive test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1156). \ No newline at end of file diff --git a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml deleted file mode 100644 index 24f2802d5..000000000 --- a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - Remove zos_copy dependency from zos_encode test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1157). diff --git a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml deleted file mode 100644 index 9c8593c1a..000000000 --- a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove zos_copy dependency from zos_fetch test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1165). diff --git a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml deleted file mode 100644 index d7fb725af..000000000 --- a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_blockinfile - Remove zos_copy dependency from zos_blockinfile test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1167). 
\ No newline at end of file diff --git a/changelogs/fragments/1169-util-job-zoau-migration.yml b/changelogs/fragments/1169-util-job-zoau-migration.yml deleted file mode 100644 index 568aa9a4e..000000000 --- a/changelogs/fragments/1169-util-job-zoau-migration.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/job.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1169). diff --git a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml deleted file mode 100644 index a95e1c7e2..000000000 --- a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - Remove zos_encode dependency from zos_lineinfile test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1179). \ No newline at end of file diff --git a/changelogs/fragments/1181-zoau-migration-zos_operator.yml b/changelogs/fragments/1181-zoau-migration-zos_operator.yml deleted file mode 100644 index 7c107de88..000000000 --- a/changelogs/fragments/1181-zoau-migration-zos_operator.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_operator - Update internal functions to account for the change to the - unit of measurement of `timeout` now in centiseconds. - (https://github.com/ansible-collections/ibm_zos_core/pull/1181). \ No newline at end of file diff --git a/changelogs/fragments/1182-migrate-module-utils-data-set.yml b/changelogs/fragments/1182-migrate-module-utils-data-set.yml deleted file mode 100644 index 857327254..000000000 --- a/changelogs/fragments/1182-migrate-module-utils-data-set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/data_set.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1182). 
diff --git a/changelogs/fragments/1183-copy-members.yml b/changelogs/fragments/1183-copy-members.yml deleted file mode 100644 index b0b0c7896..000000000 --- a/changelogs/fragments/1183-copy-members.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. - (https://github.com/ansible-collections/ibm_zos_core/pull/1183). diff --git a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml deleted file mode 100644 index 9085743d9..000000000 --- a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Remove zos_fetch dependency from zos_copy test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1184). diff --git a/changelogs/fragments/1187-migrate-module-utils-copy.yml b/changelogs/fragments/1187-migrate-module-utils-copy.yml deleted file mode 100644 index 26157f9fc..000000000 --- a/changelogs/fragments/1187-migrate-module-utils-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/copy.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1187). diff --git a/changelogs/fragments/1188-migrate-module_utils-backup.yml b/changelogs/fragments/1188-migrate-module_utils-backup.yml deleted file mode 100644 index 65945d06b..000000000 --- a/changelogs/fragments/1188-migrate-module_utils-backup.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/backup.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1188). 
\ No newline at end of file diff --git a/changelogs/fragments/1189-migrate-module_utils-encode.yml b/changelogs/fragments/1189-migrate-module_utils-encode.yml deleted file mode 100644 index d7f471847..000000000 --- a/changelogs/fragments/1189-migrate-module_utils-encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/encode.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1189). diff --git a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml deleted file mode 100644 index 4bb3a582d..000000000 --- a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/dd_statement.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1190). diff --git a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml deleted file mode 100644 index 03f39b535..000000000 --- a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_gather_facts - Update module internally to leverage ZOAU python API - for zinfo. - (https://github.com/ansible-collections/ibm_zos_core/pull/1196). \ No newline at end of file diff --git a/changelogs/fragments/1202-doc-gen-script-portability.yml b/changelogs/fragments/1202-doc-gen-script-portability.yml deleted file mode 100644 index 3c2e6ddbb..000000000 --- a/changelogs/fragments/1202-doc-gen-script-portability.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - docs/scripts - Change to sed "-i" in place option which ensures compatibility between MacOS - and GNU versions of sed command. - (https://github.com/ansible-collections/ibm_zos_core/pull/1202). 
diff --git a/changelogs/fragments/1204-migrate-zos_apf.yml b/changelogs/fragments/1204-migrate-zos_apf.yml deleted file mode 100644 index 89db1abd2..000000000 --- a/changelogs/fragments/1204-migrate-zos_apf.yml +++ /dev/null @@ -1,12 +0,0 @@ -bugfixes: - - zos_apf - List option only returned one data set. Fix now returns - the list of retrieved data sets. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - -minor_changes: - - zos_apf - Enhanced error messages when an exception is caught. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - -trivial: - - zos_apf - Migrated the module to use ZOAU v1.3.0 json schema. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). \ No newline at end of file diff --git a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml deleted file mode 100644 index 6f58e2713..000000000 --- a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Migrated the module to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1209). \ No newline at end of file diff --git a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml deleted file mode 100644 index be18056b3..000000000 --- a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_operator_action_query - Update internal functions to account for the change to the - unit of measurement of `timeout` now in centiseconds. - (https://github.com/ansible-collections/ibm_zos_core/pull/1215). 
\ No newline at end of file diff --git a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml deleted file mode 100644 index 65d3d3c08..000000000 --- a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_output - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1216). diff --git a/changelogs/fragments/1217-validate-job-query.yml b/changelogs/fragments/1217-validate-job-query.yml deleted file mode 100644 index df97c3ca6..000000000 --- a/changelogs/fragments/1217-validate-job-query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_query - Removed zos_job_submit wait argument from tests. - (https://github.com/ansible-collections/ibm_zos_core/pull/1217). \ No newline at end of file diff --git a/changelogs/fragments/1218-migrate-zos_encode.yml b/changelogs/fragments/1218-migrate-zos_encode.yml deleted file mode 100644 index 3d712b749..000000000 --- a/changelogs/fragments/1218-migrate-zos_encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1218). diff --git a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml deleted file mode 100644 index 83d2391ba..000000000 --- a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_job_submit - Was ignoring the default value for location=DATA_SET, now - when location is not specified it will default to DATA_SET. - (https://github.com/ansible-collections/ibm_zos_core/pull/1220). 
\ No newline at end of file diff --git a/changelogs/fragments/1222-zoau-migration-zos_copy.yml b/changelogs/fragments/1222-zoau-migration-zos_copy.yml deleted file mode 100644 index edc6eec06..000000000 --- a/changelogs/fragments/1222-zoau-migration-zos_copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Migrated the module to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1222). \ No newline at end of file diff --git a/changelogs/fragments/1227-migrate-zos_archive.yml b/changelogs/fragments/1227-migrate-zos_archive.yml deleted file mode 100644 index 820593c95..000000000 --- a/changelogs/fragments/1227-migrate-zos_archive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1227). diff --git a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml deleted file mode 100644 index 67642d563..000000000 --- a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_find - Removed zos_lineinfile dependency from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1228). diff --git a/changelogs/fragments/1229-migrate-zos_fetch.yml b/changelogs/fragments/1229-migrate-zos_fetch.yml deleted file mode 100644 index 07f9a26b4..000000000 --- a/changelogs/fragments/1229-migrate-zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1229). diff --git a/changelogs/fragments/1237-migrate-zos_mount.yml b/changelogs/fragments/1237-migrate-zos_mount.yml deleted file mode 100644 index d4787d42d..000000000 --- a/changelogs/fragments/1237-migrate-zos_mount.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - tests/functional/modules/test_zos_mount_func.py - migrate code to use - ZOAU v1.3.0. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1237). diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml deleted file mode 100644 index 8afe97d29..000000000 --- a/changelogs/fragments/1238-migrate-zos_unarchive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1238). \ No newline at end of file diff --git a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml deleted file mode 100644 index 851783900..000000000 --- a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_data_set - Refactor data_set module_util and functional tests for ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1242). \ No newline at end of file diff --git a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml deleted file mode 100644 index e2e841e9c..000000000 --- a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_lineinfile - migrate code to use ZOAU v1.3.0. - - zos_blockinfile - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1256). diff --git a/changelogs/fragments/1257-zoau-import-zos_apf.yml b/changelogs/fragments/1257-zoau-import-zos_apf.yml deleted file mode 100644 index 71b46ba1b..000000000 --- a/changelogs/fragments/1257-zoau-import-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated ZOAU imports from the module to capture traceback. - (https://github.com/ansible-collections/ibm_zos_core/pull/1257). 
diff --git a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml deleted file mode 100644 index 7f322afe4..000000000 --- a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml +++ /dev/null @@ -1,9 +0,0 @@ -bugfixes: - - module_utils/job.py - job output containing non-printable characters would - crash modules. Fix now handles the error gracefully and returns a message - to the user inside `content` of the `ddname` that failed. - (https://github.com/ansible-collections/ibm_zos_core/pull/1261). -trivial: - - zos_job_submit - add test case to validate a bugfix in ZOAU v1.3.0 that - handles non-UTF8 characters correctly in a job's output. - (https://github.com/ansible-collections/ibm_zos_core/pull/1261). \ No newline at end of file diff --git a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml deleted file mode 100644 index 9afe4afc3..000000000 --- a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_backup_restore - Refactor zos_backup_restore module and functional tests for ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1265). -minor_changes: - - zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier - (HLQ) for temporary and backup. - (https://github.com/ansible-collections/ibm_zos_core/pull/1265). 
\ No newline at end of file diff --git a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml deleted file mode 100644 index 1f6ba201d..000000000 --- a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - module_utils/data_set.py - len(volme) was always called on receiving - DatasetVerificationError from Dataset.create() even though volumes=None was - a valid possible outcome. The fix adds a null check to the conditional. - (https://github.com/ansible-collections/ibm_zos_core/pull/1270). \ No newline at end of file diff --git a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml deleted file mode 100644 index ef213b06f..000000000 --- a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - zos_archive - Updated examples to use path instead of src. - (https://github.com/ansible-collections/ibm_zos_core/pull/1286). - - zos_unarchive - Updated examples and return dict to use path instead of src. - (https://github.com/ansible-collections/ibm_zos_core/pull/1286). \ No newline at end of file diff --git a/changelogs/fragments/1292-doc-zos_tso_command-example.yml b/changelogs/fragments/1292-doc-zos_tso_command-example.yml deleted file mode 100644 index 6ed868be7..000000000 --- a/changelogs/fragments/1292-doc-zos_tso_command-example.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_tso_command - Added an example on how to chain multiple TSO commands such - that they are invoked together when dependent on each other. - (https://github.com/ansible-collections/ibm_zos_core/pull/1293). 
\ No newline at end of file diff --git a/changelogs/fragments/1295-doc-zos_ping-scp.yml b/changelogs/fragments/1295-doc-zos_ping-scp.yml deleted file mode 100644 index a9477150d..000000000 --- a/changelogs/fragments/1295-doc-zos_ping-scp.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_ping - Update zos_ping documentation to instruct users how - to fall back to legacy SCP when using OpenSSH 9.0 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/1295). - - zos_ping - Update zos_ping REXX source to check for python - version 3.10 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/1295). \ No newline at end of file diff --git a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml deleted file mode 100644 index ca1ea840e..000000000 --- a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove argument not documented. - (https://github.com/ansible-collections/ibm_zos_core/pull/1298). \ No newline at end of file diff --git a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml deleted file mode 100644 index 858f0b64c..000000000 --- a/changelogs/fragments/1307-update-sanity-zos_copy.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_copy - Documented `group` and `owner` options. - (https://github.com/ansible-collections/ibm_zos_core/pull/1307). - -trivial: - - zos_copy - Removed many of the variables that were passed from the - action plugin to the module, reimplementing the logic inside the - module instead. Removed the use of temp_path variable inside zos_copy - in favor of using remote_src to deal with files copied to remote. - (https://github.com/ansible-collections/ibm_zos_core/pull/1307). 
\ No newline at end of file diff --git a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml deleted file mode 100644 index 058faf66e..000000000 --- a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. - Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created - during the module execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/changelogs/fragments/1322-update-docstring-encode.yml b/changelogs/fragments/1322-update-docstring-encode.yml deleted file mode 100644 index dd5eb5389..000000000 --- a/changelogs/fragments/1322-update-docstring-encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - encode - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1322). \ No newline at end of file diff --git a/changelogs/fragments/1331-update-docstring-ickdsf.yml b/changelogs/fragments/1331-update-docstring-ickdsf.yml deleted file mode 100644 index 545ba95c1..000000000 --- a/changelogs/fragments/1331-update-docstring-ickdsf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - ickdsf - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1331). \ No newline at end of file diff --git a/changelogs/fragments/1332-update-docstring-import_handler.yml b/changelogs/fragments/1332-update-docstring-import_handler.yml deleted file mode 100644 index 5b32cd32e..000000000 --- a/changelogs/fragments/1332-update-docstring-import_handler.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - import_handler - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1332). 
\ No newline at end of file diff --git a/changelogs/fragments/1333-update-docstring-job.yml b/changelogs/fragments/1333-update-docstring-job.yml deleted file mode 100644 index 124ef2cae..000000000 --- a/changelogs/fragments/1333-update-docstring-job.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - job - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1333). \ No newline at end of file diff --git a/changelogs/fragments/1336-update-docstring-validation.yml b/changelogs/fragments/1336-update-docstring-validation.yml deleted file mode 100644 index 547103d46..000000000 --- a/changelogs/fragments/1336-update-docstring-validation.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - validation - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1336). \ No newline at end of file diff --git a/changelogs/fragments/1340-Work_around_fix_false_positive.yml b/changelogs/fragments/1340-Work_around_fix_false_positive.yml deleted file mode 100644 index 8e8360808..000000000 --- a/changelogs/fragments/1340-Work_around_fix_false_positive.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_blockinfile - Using double quotation marks inside a block resulted in a false - positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. - (https://github.com/ansible-collections/ibm_zos_core/pull/1340). \ No newline at end of file diff --git a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml deleted file mode 100644 index a09b8fa64..000000000 --- a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_job_submit - when the argument max_rc was different than 0 the changed response returned - as false. 
Fix now return a changed response as true when the rc is not 0 and max_rc is above - or equal to the value of the job. - (https://github.com/ansible-collections/ibm_zos_core/pull/1345). \ No newline at end of file diff --git a/changelogs/fragments/1347-update-docstring-zos_data_set.yml b/changelogs/fragments/1347-update-docstring-zos_data_set.yml deleted file mode 100644 index 581ab1aa9..000000000 --- a/changelogs/fragments/1347-update-docstring-zos_data_set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_data_set - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1347). \ No newline at end of file diff --git a/changelogs/fragments/1348-update-docstring-zos_encode.yml b/changelogs/fragments/1348-update-docstring-zos_encode.yml deleted file mode 100644 index de9c11c17..000000000 --- a/changelogs/fragments/1348-update-docstring-zos_encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1348). \ No newline at end of file diff --git a/changelogs/fragments/1349-update-docstring-zos_fetch.yml b/changelogs/fragments/1349-update-docstring-zos_fetch.yml deleted file mode 100644 index a38504c36..000000000 --- a/changelogs/fragments/1349-update-docstring-zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1349). \ No newline at end of file diff --git a/changelogs/fragments/1350-update-docstring-zos_find.yml b/changelogs/fragments/1350-update-docstring-zos_find.yml deleted file mode 100644 index 48c1fbce1..000000000 --- a/changelogs/fragments/1350-update-docstring-zos_find.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_find - Updated docstrings to numpy style for visual aid to developers. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1350). \ No newline at end of file diff --git a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml deleted file mode 100644 index 31fe8dfda..000000000 --- a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_gather_facts - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1351). \ No newline at end of file diff --git a/changelogs/fragments/1352-update-docstring-zos_job_output.yml b/changelogs/fragments/1352-update-docstring-zos_job_output.yml deleted file mode 100644 index 78aac0cac..000000000 --- a/changelogs/fragments/1352-update-docstring-zos_job_output.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_output - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1352). \ No newline at end of file diff --git a/changelogs/fragments/1353-update-docstring-zos_job_query.yml b/changelogs/fragments/1353-update-docstring-zos_job_query.yml deleted file mode 100644 index 550be9107..000000000 --- a/changelogs/fragments/1353-update-docstring-zos_job_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_query - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1353). \ No newline at end of file diff --git a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml deleted file mode 100644 index c2c0a4b99..000000000 --- a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1354). 
\ No newline at end of file diff --git a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml deleted file mode 100644 index 3840b2862..000000000 --- a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1355). \ No newline at end of file diff --git a/changelogs/fragments/1356-update-docstring-zos_mount.yml b/changelogs/fragments/1356-update-docstring-zos_mount.yml deleted file mode 100644 index a2c09caa5..000000000 --- a/changelogs/fragments/1356-update-docstring-zos_mount.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_mount - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1356). \ No newline at end of file diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml deleted file mode 100644 index e181d8b0f..000000000 --- a/changelogs/fragments/1388-lowercase-choices.yml +++ /dev/null @@ -1,106 +0,0 @@ -breaking_changes: - - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase - choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_backup_restore - option ``space_type`` no longer accepts uppercase - choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``space_type`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``record_format`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should - replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_submit - option ``location`` no longer accepts uppercase choices, - users should replace them with lowercase ones. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``fs_type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``automove`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of - ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. - This also applies when defining a ``dd_data_set`` inside ``dd_concat``. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - -trivial: - - zos_blockinfile - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_find - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_lineinfile - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_encode - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_fetch - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- - zos_job_output - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_query - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). \ No newline at end of file diff --git a/changelogs/fragments/1390-update-docstring-zos_script.yml b/changelogs/fragments/1390-update-docstring-zos_script.yml deleted file mode 100644 index 792bf9698..000000000 --- a/changelogs/fragments/1390-update-docstring-zos_script.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_script - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1390). \ No newline at end of file diff --git a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml deleted file mode 100644 index c435799d4..000000000 --- a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_tso_command - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1391). \ No newline at end of file diff --git a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml deleted file mode 100644 index 4536f186c..000000000 --- a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_volume_init - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1392). 
\ No newline at end of file diff --git a/changelogs/fragments/1393-update-docstring-zos_apf.yml b/changelogs/fragments/1393-update-docstring-zos_apf.yml deleted file mode 100644 index 8a89b7aa0..000000000 --- a/changelogs/fragments/1393-update-docstring-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1393). \ No newline at end of file diff --git a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml deleted file mode 100644 index 25c34fd89..000000000 --- a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator_action_query - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1394). \ No newline at end of file diff --git a/changelogs/fragments/1443-zos_find-filter-size.yml b/changelogs/fragments/1443-zos_find-filter-size.yml deleted file mode 100644 index a5a8ce029..000000000 --- a/changelogs/fragments/1443-zos_find-filter-size.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size - for PDS/Es. - (https://github.com/ansible-collections/ibm_zos_core/pull/1443). \ No newline at end of file diff --git a/changelogs/fragments/692-changelog-lint-ac-tool.yml b/changelogs/fragments/692-changelog-lint-ac-tool.yml deleted file mode 100644 index cbf6bab7d..000000000 --- a/changelogs/fragments/692-changelog-lint-ac-tool.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - ac - Added new command ac-changelog into ac tool to run changelog - fragments lint and changelog release generation. - (https://github.com/ansible-collections/ibm_zos_core/pull/1304). 
- - - workflows/ac_changelog - Added new github action that will lint - changelog fragments upon a new pull request. - (https://github.com/ansible-collections/ibm_zos_core/pull/1304). \ No newline at end of file diff --git a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml deleted file mode 100644 index e02daed4c..000000000 --- a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml +++ /dev/null @@ -1,6 +0,0 @@ -trivial: - - job.py - generalized resolution of query_exception that may be thrown. - This should prevent the stack trace. - (https://github.com/ansible-collections/ibm_zos_core/pull/1383). - - test_zos_job_submit.py - Removed code that was hiding if a duration was not returned. - (https://github.com/ansible-collections/ibm_zos_core/pull/1383). diff --git a/changelogs/fragments/992-fix-sanity4to6.yml b/changelogs/fragments/992-fix-sanity4to6.yml deleted file mode 100644 index 3d9637c63..000000000 --- a/changelogs/fragments/992-fix-sanity4to6.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_data_set.py - Corrected references to input variable definitions - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). - - data_set.py - Updated exception handler to match what was returned. - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). - - test_zos_data_set_func.py - Removed test of discontinued function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). diff --git a/changelogs/fragments/v1.10.0-beta.1_summary.yml b/changelogs/fragments/v1.10.0-beta.1_summary.yml deleted file mode 100644 index 0c1e35217..000000000 --- a/changelogs/fragments/v1.10.0-beta.1_summary.yml +++ /dev/null @@ -1,6 +0,0 @@ -release_summary: | - Release Date: '2024-05-08' - This changelog describes all changes made to the modules and plugins included - in this collection. The release date is the date the changelog is created. 
- For additional details such as required dependencies and availability review - the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From 9bf22c74198e563e73f6043192502c0b3411fffd Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 14 May 2024 16:53:16 -0700 Subject: [PATCH 383/413] Revert "Delete changelog fragements since changelog has been generated" This reverts commit 0b12772164500e700e6357a4b7b1cceda3ef7589. --- ac | 101 ++--------------- .../fragments/1032-clean-job_submit-test.yml | 3 + ...-lineinfile-remove-zos_copy-dependency.yml | 3 + .../1156-zos_archive-remove-zos_copy_dep.yml | 3 + ...-remove-zos-copy-from-zos-encode-tests.yml | 3 + ...165-remove-zos-copy-dep-from-zos-fetch.yml | 3 + ...ve-zos-copy-from-zos-blockinfile-tests.yml | 3 + .../1169-util-job-zoau-migration.yml | 3 + ...e-zos_encode-from_zos_lineinfile-tests.yml | 3 + .../1181-zoau-migration-zos_operator.yml | 4 + .../1182-migrate-module-utils-data-set.yml | 3 + changelogs/fragments/1183-copy-members.yml | 3 + ...184-remove-zos-fetch-dep-from-zos-copy.yml | 3 + .../1187-migrate-module-utils-copy.yml | 3 + .../1188-migrate-module_utils-backup.yml | 3 + .../1189-migrate-module_utils-encode.yml | 3 + ...1190-migrate-module_utils-dd_statement.yml | 3 + .../1196-zoau-migration-zos_gather_facts.yml | 4 + .../1202-doc-gen-script-portability.yml | 4 + changelogs/fragments/1204-migrate-zos_apf.yml | 12 ++ .../1209-zoau-migration-zos_job_submit.yml | 3 + ...1215-Migrate_zos_operator_action_query.yml | 4 + ...lidate_module_zos_job_output_migration.yml | 3 + .../fragments/1217-validate-job-query.yml | 3 + .../fragments/1218-migrate-zos_encode.yml | 3 + ...20-bugfix-zos_job_submit-default_value.yml | 4 + .../1222-zoau-migration-zos_copy.yml | 3 + .../fragments/1227-migrate-zos_archive.yml | 3 + ...228-zos_find-remove-zos_lineinfile_dep.yml | 3 + 
.../fragments/1229-migrate-zos_fetch.yml | 3 + .../fragments/1237-migrate-zos_mount.yml | 4 + .../fragments/1238-migrate-zos_unarchive.yml | 3 + .../1242-zoau-migration-zos_data_set.yml | 3 + ...Migrate_zos_blockinfile_and_lineinfile.yml | 4 + .../fragments/1257-zoau-import-zos_apf.yml | 3 + .../1261-job-submit-non-utf8-chars.yml | 9 ++ .../1265_Migrate_zos_backup_restore.yml | 7 ++ ...0-quick-fix-len-of-volumes-work-around.yml | 5 + ...-update-zos_archive-zos_unarchive-docs.yml | 5 + .../1292-doc-zos_tso_command-example.yml | 4 + .../fragments/1295-doc-zos_ping-scp.yml | 7 ++ ...98-Remove_local_charset_from_zos_fetch.yml | 3 + .../fragments/1307-update-sanity-zos_copy.yml | 10 ++ .../1320-Zos_mvs_raw_ignores_tmp_hlq.yml | 5 + .../1322-update-docstring-encode.yml | 3 + .../1331-update-docstring-ickdsf.yml | 3 + .../1332-update-docstring-import_handler.yml | 3 + .../fragments/1333-update-docstring-job.yml | 3 + .../1336-update-docstring-validation.yml | 3 + .../1340-Work_around_fix_false_positive.yml | 4 + ...re_than_0_doesn_not_put_change_as_true.yml | 5 + .../1347-update-docstring-zos_data_set.yml | 3 + .../1348-update-docstring-zos_encode.yml | 3 + .../1349-update-docstring-zos_fetch.yml | 3 + .../1350-update-docstring-zos_find.yml | 3 + ...1351-update-docstring-zos_gather_facts.yml | 3 + .../1352-update-docstring-zos_job_output.yml | 3 + .../1353-update-docstring-zos_job_query.yml | 3 + .../1354-update-docstring-zos_job_submit.yml | 3 + .../1355-update-docstring-zos_lineinfile.yml | 3 + .../1356-update-docstring-zos_mount.yml | 3 + .../fragments/1388-lowercase-choices.yml | 106 ++++++++++++++++++ .../1390-update-docstring-zos_script.yml | 3 + .../1391-update-docstring-zos_tso_command.yml | 3 + .../1392-update-docstring-zos_volume_init.yml | 3 + .../1393-update-docstring-zos_apf.yml | 3 + ...te_docstring-zos_operator_action_query.yml | 3 + .../fragments/1443-zos_find-filter-size.yml | 4 + .../fragments/692-changelog-lint-ac-tool.yml | 8 ++ 
.../971-bug-job_submit-can-stacktrace.yml | 6 + changelogs/fragments/992-fix-sanity4to6.yml | 7 ++ .../fragments/v1.10.0-beta.1_summary.yml | 6 + 72 files changed, 391 insertions(+), 89 deletions(-) create mode 100644 changelogs/fragments/1032-clean-job_submit-test.yml create mode 100644 changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml create mode 100644 changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml create mode 100644 changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml create mode 100644 changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml create mode 100644 changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml create mode 100644 changelogs/fragments/1169-util-job-zoau-migration.yml create mode 100644 changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml create mode 100644 changelogs/fragments/1181-zoau-migration-zos_operator.yml create mode 100644 changelogs/fragments/1182-migrate-module-utils-data-set.yml create mode 100644 changelogs/fragments/1183-copy-members.yml create mode 100644 changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml create mode 100644 changelogs/fragments/1187-migrate-module-utils-copy.yml create mode 100644 changelogs/fragments/1188-migrate-module_utils-backup.yml create mode 100644 changelogs/fragments/1189-migrate-module_utils-encode.yml create mode 100644 changelogs/fragments/1190-migrate-module_utils-dd_statement.yml create mode 100644 changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml create mode 100644 changelogs/fragments/1202-doc-gen-script-portability.yml create mode 100644 changelogs/fragments/1204-migrate-zos_apf.yml create mode 100644 changelogs/fragments/1209-zoau-migration-zos_job_submit.yml create mode 100644 changelogs/fragments/1215-Migrate_zos_operator_action_query.yml create mode 100644 changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml create mode 100644 
changelogs/fragments/1217-validate-job-query.yml create mode 100644 changelogs/fragments/1218-migrate-zos_encode.yml create mode 100644 changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml create mode 100644 changelogs/fragments/1222-zoau-migration-zos_copy.yml create mode 100644 changelogs/fragments/1227-migrate-zos_archive.yml create mode 100644 changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml create mode 100644 changelogs/fragments/1229-migrate-zos_fetch.yml create mode 100644 changelogs/fragments/1237-migrate-zos_mount.yml create mode 100644 changelogs/fragments/1238-migrate-zos_unarchive.yml create mode 100644 changelogs/fragments/1242-zoau-migration-zos_data_set.yml create mode 100644 changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml create mode 100644 changelogs/fragments/1257-zoau-import-zos_apf.yml create mode 100644 changelogs/fragments/1261-job-submit-non-utf8-chars.yml create mode 100644 changelogs/fragments/1265_Migrate_zos_backup_restore.yml create mode 100644 changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml create mode 100644 changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml create mode 100644 changelogs/fragments/1292-doc-zos_tso_command-example.yml create mode 100644 changelogs/fragments/1295-doc-zos_ping-scp.yml create mode 100644 changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml create mode 100644 changelogs/fragments/1307-update-sanity-zos_copy.yml create mode 100644 changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml create mode 100644 changelogs/fragments/1322-update-docstring-encode.yml create mode 100644 changelogs/fragments/1331-update-docstring-ickdsf.yml create mode 100644 changelogs/fragments/1332-update-docstring-import_handler.yml create mode 100644 changelogs/fragments/1333-update-docstring-job.yml create mode 100644 changelogs/fragments/1336-update-docstring-validation.yml create mode 100644 
changelogs/fragments/1340-Work_around_fix_false_positive.yml create mode 100644 changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml create mode 100644 changelogs/fragments/1347-update-docstring-zos_data_set.yml create mode 100644 changelogs/fragments/1348-update-docstring-zos_encode.yml create mode 100644 changelogs/fragments/1349-update-docstring-zos_fetch.yml create mode 100644 changelogs/fragments/1350-update-docstring-zos_find.yml create mode 100644 changelogs/fragments/1351-update-docstring-zos_gather_facts.yml create mode 100644 changelogs/fragments/1352-update-docstring-zos_job_output.yml create mode 100644 changelogs/fragments/1353-update-docstring-zos_job_query.yml create mode 100644 changelogs/fragments/1354-update-docstring-zos_job_submit.yml create mode 100644 changelogs/fragments/1355-update-docstring-zos_lineinfile.yml create mode 100644 changelogs/fragments/1356-update-docstring-zos_mount.yml create mode 100644 changelogs/fragments/1388-lowercase-choices.yml create mode 100644 changelogs/fragments/1390-update-docstring-zos_script.yml create mode 100644 changelogs/fragments/1391-update-docstring-zos_tso_command.yml create mode 100644 changelogs/fragments/1392-update-docstring-zos_volume_init.yml create mode 100644 changelogs/fragments/1393-update-docstring-zos_apf.yml create mode 100644 changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml create mode 100644 changelogs/fragments/1443-zos_find-filter-size.yml create mode 100644 changelogs/fragments/692-changelog-lint-ac-tool.yml create mode 100644 changelogs/fragments/971-bug-job_submit-can-stacktrace.yml create mode 100644 changelogs/fragments/992-fix-sanity4to6.yml create mode 100644 changelogs/fragments/v1.10.0-beta.1_summary.yml diff --git a/ac b/ac index 0f5bc07b4..9aee6a02d 100755 --- a/ac +++ b/ac @@ -34,7 +34,6 @@ normalize_version() { echo "$@" | awk -F. 
'{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; } -# Method determines the lastest (highest number) version venv that is managed by ./ac latest_venv(){ dir_version_latest="0" test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` @@ -49,33 +48,6 @@ latest_venv(){ fi } -# Method will take a venv name such as venv-2.16 and validate that it exists -validate_venv(){ - option_venv=$1 - #test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` - - if [[ "$option_venv" =~ "latest" ]]; then - test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-latest* 2>/dev/null` - if [[ "$test_for_managed_venv" =~ "latest" ]]; then - dir_version_latest=$option_venv - fi - #elif [ ! -z "$test_for_managed_venv" ]; then - else - for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | rev | cut -d"/" -f1| rev`; do - if [ $dir_version == $option_venv ]; then - dir_version_latest=$dir_version - fi - done - fi - - if [ ! -z "$dir_version_latest" ]; then - echo "${VENV_HOME_MANAGED}"/$dir_version_latest - else - echo "Unable to validate managed venv option $option_venv, exiting." - exit - fi -} - VENV=`latest_venv` file="" @@ -331,18 +303,6 @@ ac_install(){ fi } -# Run a make module doc -# ------------------------------------------------------------------------------ -#->ac-module-doc: -## Runs make module-doc to generate the module documentation -## Usage: ac [--ac-module-doc] -## Example: -## $ ac --ac-module-doc -ac_module_doc(){ - message "Running make module-doc" - . 
$VENV_BIN/activate && cd docs/ && make module-doc -} - # ------------------------------------------------------------------------------ # Run ansible-lint on the locally checked out GH Branch # ------------------------------------------------------------------------------ @@ -675,24 +635,11 @@ venv_setup(){ # TODO: Allow user to specify which venv they can start # ------------------------------------------------------------------------------ #->venv-start: -## Activate the latest ansible managed virtual environment or optionally start -## by its name. -## Usage: ac [--venv-start --name <venv name>] +## Activate the lastest ansible managed virtual environment. +## Usage: ac [--venv-start] ## Example: -## $ ac --venv-start --name venv-2.16 ## $ ac --venv-start venv_start(){ - option_name=$1 - - if [ "$option_name" ]; then - VENV=`validate_venv $option_name` - - if [ ! -z "$VENV" ]; then - VENV_BIN=$VENV/bin - VENV_BASENAME=`basename $VENV` - fi - fi - message "Starting managed python virtual environment: $VENV_BASENAME" #. $VENV_BIN/activate; exec /bin/sh -i /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i" @@ -703,27 +650,14 @@ venv_start(){ # TODO: Allow user to specify which venv they can stop # ------------------------------------------------------------------------------ #->venv-stop: -## Deactivate the latest ansible managed virtual environment or optionally deactivate -## by its name. -## Usage: ac [--venv-stop --name <venv name>]] +## Deactivate the lastest ansible managed virtual environment. +## Usage: ac [--venv-stop] ## Example: -## $ ac --venv-stop --name venv-2.16 ## $ ac --venv-stop venv_stop(){ - option_name=$1 - - if [ "$option_name" ]; then - VENV=`validate_venv $option_name` - - if [ ! -z "$VENV" ]; then - VENV_BIN=$VENV/bin - VENV_BASENAME=`basename $VENV` - fi - fi - message "Stopping managed ansible virtual environment located at: $VENV_BASENAME" message "ac --venv-stop does not actually currently work, use CNTL-D" - . 
deactivate $VENV_BASENAME 2>/dev/null; + . deactivate $VENV_BASENAME; } # ============================================================================== @@ -757,18 +691,14 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; - --ac-galaxy-importer) # Command + --ac-galaxy-importer) # Command ensure_managed_venv_exists $1 option_submitted="--ac-galaxy-importer" ;; - --ac-changelog) # Command + --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" ;; - --ac-module-doc) # Command - ensure_managed_venv_exists $1 - option_submitted="--ac-module-doc" - ;; --ac-install) ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" @@ -832,7 +762,7 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--venv-stop" ;; - --command|--command=?*) # option + --command|--command=?*) # option command=`option_processor $1 $2` option_sanitize $command shift @@ -847,7 +777,7 @@ while true; do option_sanitize $file shift ;; - --host|--host=?*) # option + --host|--host=?*) # option host=`option_processor $1 $2` option_sanitize $host shift @@ -857,11 +787,6 @@ while true; do option_sanitize $level shift ;; - --name|--name=?*) # option - name=`option_processor $1 $2` - option_sanitize $name - shift - ;; --out-file|--out-file=?*) # option out_file=`option_processor $1 $2` option_sanitize $out_file @@ -930,8 +855,6 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then ac_changelog $command -elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-module-doc" ] ; then - ac_module_doc elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then @@ -959,7 +882,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then elif [ 
"$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then venv_setup $password elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then - venv_start $name + venv_start elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-stop" ] ; then - venv_stop $name -fi \ No newline at end of file + venv_stop +fi diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml new file mode 100644 index 000000000..bb4248aec --- /dev/null +++ b/changelogs/fragments/1032-clean-job_submit-test.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. + (https://github.com/ansible-collections/ibm_zos_core/pull/1364). diff --git a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml new file mode 100644 index 000000000..44015bbd9 --- /dev/null +++ b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - remove zos_copy calls from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1152). diff --git a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml new file mode 100644 index 000000000..ea8aacee9 --- /dev/null +++ b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - Remove zos_copy dependency from zos_archive test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1156). 
\ No newline at end of file diff --git a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml new file mode 100644 index 000000000..24f2802d5 --- /dev/null +++ b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - Remove zos_copy dependency from zos_encode test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1157). diff --git a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml new file mode 100644 index 000000000..9c8593c1a --- /dev/null +++ b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove zos_copy dependency from zos_fetch test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1165). diff --git a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml new file mode 100644 index 000000000..d7fb725af --- /dev/null +++ b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_blockinfile - Remove zos_copy dependency from zos_blockinfile test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1167). \ No newline at end of file diff --git a/changelogs/fragments/1169-util-job-zoau-migration.yml b/changelogs/fragments/1169-util-job-zoau-migration.yml new file mode 100644 index 000000000..568aa9a4e --- /dev/null +++ b/changelogs/fragments/1169-util-job-zoau-migration.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/job.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1169). 
diff --git a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml new file mode 100644 index 000000000..a95e1c7e2 --- /dev/null +++ b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - Remove zos_encode dependency from zos_lineinfile test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1179). \ No newline at end of file diff --git a/changelogs/fragments/1181-zoau-migration-zos_operator.yml b/changelogs/fragments/1181-zoau-migration-zos_operator.yml new file mode 100644 index 000000000..7c107de88 --- /dev/null +++ b/changelogs/fragments/1181-zoau-migration-zos_operator.yml @@ -0,0 +1,4 @@ +trivial: + - zos_operator - Update internal functions to account for the change to the + unit of measurement of `timeout` now in centiseconds. + (https://github.com/ansible-collections/ibm_zos_core/pull/1181). \ No newline at end of file diff --git a/changelogs/fragments/1182-migrate-module-utils-data-set.yml b/changelogs/fragments/1182-migrate-module-utils-data-set.yml new file mode 100644 index 000000000..857327254 --- /dev/null +++ b/changelogs/fragments/1182-migrate-module-utils-data-set.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/data_set.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1182). diff --git a/changelogs/fragments/1183-copy-members.yml b/changelogs/fragments/1183-copy-members.yml new file mode 100644 index 000000000..b0b0c7896 --- /dev/null +++ b/changelogs/fragments/1183-copy-members.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. + (https://github.com/ansible-collections/ibm_zos_core/pull/1183). 
diff --git a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml new file mode 100644 index 000000000..9085743d9 --- /dev/null +++ b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Remove zos_fetch dependency from zos_copy test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1184). diff --git a/changelogs/fragments/1187-migrate-module-utils-copy.yml b/changelogs/fragments/1187-migrate-module-utils-copy.yml new file mode 100644 index 000000000..26157f9fc --- /dev/null +++ b/changelogs/fragments/1187-migrate-module-utils-copy.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/copy.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1187). diff --git a/changelogs/fragments/1188-migrate-module_utils-backup.yml b/changelogs/fragments/1188-migrate-module_utils-backup.yml new file mode 100644 index 000000000..65945d06b --- /dev/null +++ b/changelogs/fragments/1188-migrate-module_utils-backup.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/backup.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1188). \ No newline at end of file diff --git a/changelogs/fragments/1189-migrate-module_utils-encode.yml b/changelogs/fragments/1189-migrate-module_utils-encode.yml new file mode 100644 index 000000000..d7f471847 --- /dev/null +++ b/changelogs/fragments/1189-migrate-module_utils-encode.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/encode.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1189). 
diff --git a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml new file mode 100644 index 000000000..4bb3a582d --- /dev/null +++ b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/dd_statement.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1190). diff --git a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml new file mode 100644 index 000000000..03f39b535 --- /dev/null +++ b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml @@ -0,0 +1,4 @@ +trivial: + - zos_gather_facts - Update module internally to leverage ZOAU python API + for zinfo. + (https://github.com/ansible-collections/ibm_zos_core/pull/1196). \ No newline at end of file diff --git a/changelogs/fragments/1202-doc-gen-script-portability.yml b/changelogs/fragments/1202-doc-gen-script-portability.yml new file mode 100644 index 000000000..3c2e6ddbb --- /dev/null +++ b/changelogs/fragments/1202-doc-gen-script-portability.yml @@ -0,0 +1,4 @@ +trivial: + - docs/scripts - Change to sed "-i" in place option which ensures compatibility between MacOS + and GNU versions of sed command. + (https://github.com/ansible-collections/ibm_zos_core/pull/1202). diff --git a/changelogs/fragments/1204-migrate-zos_apf.yml b/changelogs/fragments/1204-migrate-zos_apf.yml new file mode 100644 index 000000000..89db1abd2 --- /dev/null +++ b/changelogs/fragments/1204-migrate-zos_apf.yml @@ -0,0 +1,12 @@ +bugfixes: + - zos_apf - List option only returned one data set. Fix now returns + the list of retrieved data sets. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + +minor_changes: + - zos_apf - Enhanced error messages when an exception is caught. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). 
+ +trivial: + - zos_apf - Migrated the module to use ZOAU v1.3.0 json schema. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). \ No newline at end of file diff --git a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml new file mode 100644 index 000000000..6f58e2713 --- /dev/null +++ b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Migrated the module to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1209). \ No newline at end of file diff --git a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml new file mode 100644 index 000000000..be18056b3 --- /dev/null +++ b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml @@ -0,0 +1,4 @@ +trivial: + - zos_operator_action_query - Update internal functions to account for the change to the + unit of measurement of `timeout` now in centiseconds. + (https://github.com/ansible-collections/ibm_zos_core/pull/1215). \ No newline at end of file diff --git a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml new file mode 100644 index 000000000..65d3d3c08 --- /dev/null +++ b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_output - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1216). diff --git a/changelogs/fragments/1217-validate-job-query.yml b/changelogs/fragments/1217-validate-job-query.yml new file mode 100644 index 000000000..df97c3ca6 --- /dev/null +++ b/changelogs/fragments/1217-validate-job-query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_query - Removed zos_job_submit wait argument from tests. + (https://github.com/ansible-collections/ibm_zos_core/pull/1217). 
\ No newline at end of file diff --git a/changelogs/fragments/1218-migrate-zos_encode.yml b/changelogs/fragments/1218-migrate-zos_encode.yml new file mode 100644 index 000000000..3d712b749 --- /dev/null +++ b/changelogs/fragments/1218-migrate-zos_encode.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1218). diff --git a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml new file mode 100644 index 000000000..83d2391ba --- /dev/null +++ b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. + (https://github.com/ansible-collections/ibm_zos_core/pull/1220). \ No newline at end of file diff --git a/changelogs/fragments/1222-zoau-migration-zos_copy.yml b/changelogs/fragments/1222-zoau-migration-zos_copy.yml new file mode 100644 index 000000000..edc6eec06 --- /dev/null +++ b/changelogs/fragments/1222-zoau-migration-zos_copy.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Migrated the module to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1222). \ No newline at end of file diff --git a/changelogs/fragments/1227-migrate-zos_archive.yml b/changelogs/fragments/1227-migrate-zos_archive.yml new file mode 100644 index 000000000..820593c95 --- /dev/null +++ b/changelogs/fragments/1227-migrate-zos_archive.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1227). 
diff --git a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml new file mode 100644 index 000000000..67642d563 --- /dev/null +++ b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml @@ -0,0 +1,3 @@ +trivial: + - zos_find - Removed zos_lineinfile dependency from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1228). diff --git a/changelogs/fragments/1229-migrate-zos_fetch.yml b/changelogs/fragments/1229-migrate-zos_fetch.yml new file mode 100644 index 000000000..07f9a26b4 --- /dev/null +++ b/changelogs/fragments/1229-migrate-zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1229). diff --git a/changelogs/fragments/1237-migrate-zos_mount.yml b/changelogs/fragments/1237-migrate-zos_mount.yml new file mode 100644 index 000000000..d4787d42d --- /dev/null +++ b/changelogs/fragments/1237-migrate-zos_mount.yml @@ -0,0 +1,4 @@ +trivial: + - tests/functional/modules/test_zos_mount_func.py - migrate code to use + ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1237). diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml new file mode 100644 index 000000000..8afe97d29 --- /dev/null +++ b/changelogs/fragments/1238-migrate-zos_unarchive.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1238). \ No newline at end of file diff --git a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml new file mode 100644 index 000000000..851783900 --- /dev/null +++ b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml @@ -0,0 +1,3 @@ +trivial: + - zos_data_set - Refactor data_set module_util and functional tests for ZOAU v1.3.0. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1242). \ No newline at end of file diff --git a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml new file mode 100644 index 000000000..e2e841e9c --- /dev/null +++ b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml @@ -0,0 +1,4 @@ +trivial: + - zos_lineinfile - migrate code to use ZOAU v1.3.0. + - zos_blockinfile - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1256). diff --git a/changelogs/fragments/1257-zoau-import-zos_apf.yml b/changelogs/fragments/1257-zoau-import-zos_apf.yml new file mode 100644 index 000000000..71b46ba1b --- /dev/null +++ b/changelogs/fragments/1257-zoau-import-zos_apf.yml @@ -0,0 +1,3 @@ +trivial: + - zos_apf - Updated ZOAU imports from the module to capture traceback. + (https://github.com/ansible-collections/ibm_zos_core/pull/1257). diff --git a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml new file mode 100644 index 000000000..7f322afe4 --- /dev/null +++ b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml @@ -0,0 +1,9 @@ +bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. + (https://github.com/ansible-collections/ibm_zos_core/pull/1261). +trivial: + - zos_job_submit - add test case to validate a bugfix in ZOAU v1.3.0 that + handles non-UTF8 characters correctly in a job's output. + (https://github.com/ansible-collections/ibm_zos_core/pull/1261). 
\ No newline at end of file diff --git a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml new file mode 100644 index 000000000..9afe4afc3 --- /dev/null +++ b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml @@ -0,0 +1,7 @@ +trivial: + - zos_backup_restore - Refactor zos_backup_restore module and functional tests for ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1265). +minor_changes: + - zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier + (HLQ) for temporary and backup. + (https://github.com/ansible-collections/ibm_zos_core/pull/1265). \ No newline at end of file diff --git a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml new file mode 100644 index 000000000..1f6ba201d --- /dev/null +++ b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml @@ -0,0 +1,5 @@ +trivial: + - module_utils/data_set.py - len(volme) was always called on receiving + DatasetVerificationError from Dataset.create() even though volumes=None was + a valid possible outcome. The fix adds a null check to the conditional. + (https://github.com/ansible-collections/ibm_zos_core/pull/1270). \ No newline at end of file diff --git a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml new file mode 100644 index 000000000..ef213b06f --- /dev/null +++ b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml @@ -0,0 +1,5 @@ +trivial: + - zos_archive - Updated examples to use path instead of src. + (https://github.com/ansible-collections/ibm_zos_core/pull/1286). + - zos_unarchive - Updated examples and return dict to use path instead of src. + (https://github.com/ansible-collections/ibm_zos_core/pull/1286). 
\ No newline at end of file diff --git a/changelogs/fragments/1292-doc-zos_tso_command-example.yml b/changelogs/fragments/1292-doc-zos_tso_command-example.yml new file mode 100644 index 000000000..6ed868be7 --- /dev/null +++ b/changelogs/fragments/1292-doc-zos_tso_command-example.yml @@ -0,0 +1,4 @@ +trivial: + - zos_tso_command - Added an example on how to chain multiple TSO commands such + that they are invoked together when dependent on each other. + (https://github.com/ansible-collections/ibm_zos_core/pull/1293). \ No newline at end of file diff --git a/changelogs/fragments/1295-doc-zos_ping-scp.yml b/changelogs/fragments/1295-doc-zos_ping-scp.yml new file mode 100644 index 000000000..a9477150d --- /dev/null +++ b/changelogs/fragments/1295-doc-zos_ping-scp.yml @@ -0,0 +1,7 @@ +trivial: + - zos_ping - Update zos_ping documentation to instruct users how + to fall back to legacy SCP when using OpenSSH 9.0 or later. + (https://github.com/ansible-collections/ibm_zos_core/pull/1295). + - zos_ping - Update zos_ping REXX source to check for python + version 3.10 or later. + (https://github.com/ansible-collections/ibm_zos_core/pull/1295). \ No newline at end of file diff --git a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml new file mode 100644 index 000000000..ca1ea840e --- /dev/null +++ b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove argument not documented. + (https://github.com/ansible-collections/ibm_zos_core/pull/1298). \ No newline at end of file diff --git a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml new file mode 100644 index 000000000..858f0b64c --- /dev/null +++ b/changelogs/fragments/1307-update-sanity-zos_copy.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_copy - Documented `group` and `owner` options. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + +trivial: + - zos_copy - Removed many of the variables that were passed from the + action plugin to the module, reimplementing the logic inside the + module instead. Removed the use of temp_path variable inside zos_copy + in favor of using remote_src to deal with files copied to remote. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). \ No newline at end of file diff --git a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml new file mode 100644 index 000000000..058faf66e --- /dev/null +++ b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. + Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created + during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/changelogs/fragments/1322-update-docstring-encode.yml b/changelogs/fragments/1322-update-docstring-encode.yml new file mode 100644 index 000000000..dd5eb5389 --- /dev/null +++ b/changelogs/fragments/1322-update-docstring-encode.yml @@ -0,0 +1,3 @@ +trivial: + - encode - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1322). \ No newline at end of file diff --git a/changelogs/fragments/1331-update-docstring-ickdsf.yml b/changelogs/fragments/1331-update-docstring-ickdsf.yml new file mode 100644 index 000000000..545ba95c1 --- /dev/null +++ b/changelogs/fragments/1331-update-docstring-ickdsf.yml @@ -0,0 +1,3 @@ +trivial: + - ickdsf - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1331). 
\ No newline at end of file diff --git a/changelogs/fragments/1332-update-docstring-import_handler.yml b/changelogs/fragments/1332-update-docstring-import_handler.yml new file mode 100644 index 000000000..5b32cd32e --- /dev/null +++ b/changelogs/fragments/1332-update-docstring-import_handler.yml @@ -0,0 +1,3 @@ +trivial: + - import_handler - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1332). \ No newline at end of file diff --git a/changelogs/fragments/1333-update-docstring-job.yml b/changelogs/fragments/1333-update-docstring-job.yml new file mode 100644 index 000000000..124ef2cae --- /dev/null +++ b/changelogs/fragments/1333-update-docstring-job.yml @@ -0,0 +1,3 @@ +trivial: + - job - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1333). \ No newline at end of file diff --git a/changelogs/fragments/1336-update-docstring-validation.yml b/changelogs/fragments/1336-update-docstring-validation.yml new file mode 100644 index 000000000..547103d46 --- /dev/null +++ b/changelogs/fragments/1336-update-docstring-validation.yml @@ -0,0 +1,3 @@ +trivial: + - validation - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1336). \ No newline at end of file diff --git a/changelogs/fragments/1340-Work_around_fix_false_positive.yml b/changelogs/fragments/1340-Work_around_fix_false_positive.yml new file mode 100644 index 000000000..8e8360808 --- /dev/null +++ b/changelogs/fragments/1340-Work_around_fix_false_positive.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_blockinfile - Using double quotation marks inside a block resulted in a false + positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/1340). 
\ No newline at end of file diff --git a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml new file mode 100644 index 000000000..a09b8fa64 --- /dev/null +++ b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_job_submit - when the argument max_rc was different than 0 the changed response returned + as false. Fix now return a changed response as true when the rc is not 0 and max_rc is above + or equal to the value of the job. + (https://github.com/ansible-collections/ibm_zos_core/pull/1345). \ No newline at end of file diff --git a/changelogs/fragments/1347-update-docstring-zos_data_set.yml b/changelogs/fragments/1347-update-docstring-zos_data_set.yml new file mode 100644 index 000000000..581ab1aa9 --- /dev/null +++ b/changelogs/fragments/1347-update-docstring-zos_data_set.yml @@ -0,0 +1,3 @@ +trivial: + - zos_data_set - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1347). \ No newline at end of file diff --git a/changelogs/fragments/1348-update-docstring-zos_encode.yml b/changelogs/fragments/1348-update-docstring-zos_encode.yml new file mode 100644 index 000000000..de9c11c17 --- /dev/null +++ b/changelogs/fragments/1348-update-docstring-zos_encode.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1348). \ No newline at end of file diff --git a/changelogs/fragments/1349-update-docstring-zos_fetch.yml b/changelogs/fragments/1349-update-docstring-zos_fetch.yml new file mode 100644 index 000000000..a38504c36 --- /dev/null +++ b/changelogs/fragments/1349-update-docstring-zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Updated docstrings to numpy style for visual aid to developers. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1349). \ No newline at end of file diff --git a/changelogs/fragments/1350-update-docstring-zos_find.yml b/changelogs/fragments/1350-update-docstring-zos_find.yml new file mode 100644 index 000000000..48c1fbce1 --- /dev/null +++ b/changelogs/fragments/1350-update-docstring-zos_find.yml @@ -0,0 +1,3 @@ +trivial: + - zos_find - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1350). \ No newline at end of file diff --git a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml new file mode 100644 index 000000000..31fe8dfda --- /dev/null +++ b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml @@ -0,0 +1,3 @@ +trivial: + - zos_gather_facts - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1351). \ No newline at end of file diff --git a/changelogs/fragments/1352-update-docstring-zos_job_output.yml b/changelogs/fragments/1352-update-docstring-zos_job_output.yml new file mode 100644 index 000000000..78aac0cac --- /dev/null +++ b/changelogs/fragments/1352-update-docstring-zos_job_output.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_output - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1352). \ No newline at end of file diff --git a/changelogs/fragments/1353-update-docstring-zos_job_query.yml b/changelogs/fragments/1353-update-docstring-zos_job_query.yml new file mode 100644 index 000000000..550be9107 --- /dev/null +++ b/changelogs/fragments/1353-update-docstring-zos_job_query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_query - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1353). 
\ No newline at end of file diff --git a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml new file mode 100644 index 000000000..c2c0a4b99 --- /dev/null +++ b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1354). \ No newline at end of file diff --git a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml new file mode 100644 index 000000000..3840b2862 --- /dev/null +++ b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1355). \ No newline at end of file diff --git a/changelogs/fragments/1356-update-docstring-zos_mount.yml b/changelogs/fragments/1356-update-docstring-zos_mount.yml new file mode 100644 index 000000000..a2c09caa5 --- /dev/null +++ b/changelogs/fragments/1356-update-docstring-zos_mount.yml @@ -0,0 +1,3 @@ +trivial: + - zos_mount - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1356). \ No newline at end of file diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml new file mode 100644 index 000000000..e181d8b0f --- /dev/null +++ b/changelogs/fragments/1388-lowercase-choices.yml @@ -0,0 +1,106 @@ +breaking_changes: + - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_backup_restore - option ``space_type`` no longer accepts uppercase + choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``space_type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``record_format`` no longer accepts uppercase choices, users + should replace them with lowercase ones. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should + replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_submit - option ``location`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``fs_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``automove`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of + ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. + This also applies when defining a ``dd_data_set`` inside ``dd_concat``. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + +trivial: + - zos_blockinfile - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_find - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_lineinfile - updated tests to use lowercase options when calling + another module in the collection. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_encode - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_fetch - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_output - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_query - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). \ No newline at end of file diff --git a/changelogs/fragments/1390-update-docstring-zos_script.yml b/changelogs/fragments/1390-update-docstring-zos_script.yml new file mode 100644 index 000000000..792bf9698 --- /dev/null +++ b/changelogs/fragments/1390-update-docstring-zos_script.yml @@ -0,0 +1,3 @@ +trivial: + - zos_script - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1390). \ No newline at end of file diff --git a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml new file mode 100644 index 000000000..c435799d4 --- /dev/null +++ b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml @@ -0,0 +1,3 @@ +trivial: + - zos_tso_command - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1391). 
\ No newline at end of file diff --git a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml new file mode 100644 index 000000000..4536f186c --- /dev/null +++ b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml @@ -0,0 +1,3 @@ +trivial: + - zos_volume_init - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1392). \ No newline at end of file diff --git a/changelogs/fragments/1393-update-docstring-zos_apf.yml b/changelogs/fragments/1393-update-docstring-zos_apf.yml new file mode 100644 index 000000000..8a89b7aa0 --- /dev/null +++ b/changelogs/fragments/1393-update-docstring-zos_apf.yml @@ -0,0 +1,3 @@ +trivial: + - zos_apf - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1393). \ No newline at end of file diff --git a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml new file mode 100644 index 000000000..25c34fd89 --- /dev/null +++ b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator_action_query - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1394). \ No newline at end of file diff --git a/changelogs/fragments/1443-zos_find-filter-size.yml b/changelogs/fragments/1443-zos_find-filter-size.yml new file mode 100644 index 000000000..a5a8ce029 --- /dev/null +++ b/changelogs/fragments/1443-zos_find-filter-size.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size + for PDS/Es. + (https://github.com/ansible-collections/ibm_zos_core/pull/1443). 
\ No newline at end of file diff --git a/changelogs/fragments/692-changelog-lint-ac-tool.yml b/changelogs/fragments/692-changelog-lint-ac-tool.yml new file mode 100644 index 000000000..cbf6bab7d --- /dev/null +++ b/changelogs/fragments/692-changelog-lint-ac-tool.yml @@ -0,0 +1,8 @@ +trivial: + - ac - Added new command ac-changelog into ac tool to run changelog + fragments lint and changelog release generation. + (https://github.com/ansible-collections/ibm_zos_core/pull/1304). + + - workflows/ac_changelog - Added new github action that will lint + changelog fragments upon a new pull request. + (https://github.com/ansible-collections/ibm_zos_core/pull/1304). \ No newline at end of file diff --git a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml new file mode 100644 index 000000000..e02daed4c --- /dev/null +++ b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml @@ -0,0 +1,6 @@ +trivial: + - job.py - generalized resolution of query_exception that may be thrown. + This should prevent the stack trace. + (https://github.com/ansible-collections/ibm_zos_core/pull/1383). + - test_zos_job_submit.py - Removed code that was hiding if a duration was not returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/1383). diff --git a/changelogs/fragments/992-fix-sanity4to6.yml b/changelogs/fragments/992-fix-sanity4to6.yml new file mode 100644 index 000000000..3d9637c63 --- /dev/null +++ b/changelogs/fragments/992-fix-sanity4to6.yml @@ -0,0 +1,7 @@ +trivial: + - zos_data_set.py - Corrected references to input variable definitions + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). + - data_set.py - Updated exception handler to match what was returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). + - test_zos_data_set_func.py - Removed test of discontinued function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). 
diff --git a/changelogs/fragments/v1.10.0-beta.1_summary.yml b/changelogs/fragments/v1.10.0-beta.1_summary.yml new file mode 100644 index 000000000..0c1e35217 --- /dev/null +++ b/changelogs/fragments/v1.10.0-beta.1_summary.yml @@ -0,0 +1,6 @@ +release_summary: | + Release Date: '2024-05-08' + This changelog describes all changes made to the modules and plugins included + in this collection. The release date is the date the changelog is created. + For additional details such as required dependencies and availability review + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From ac700e9071052ca7c5468b108ec519391d785156 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 14 May 2024 16:54:55 -0700 Subject: [PATCH 384/413] Remove changelog fragments given log has been generated Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1032-clean-job_submit-test.yml | 3 - ...-lineinfile-remove-zos_copy-dependency.yml | 3 - .../1156-zos_archive-remove-zos_copy_dep.yml | 3 - ...-remove-zos-copy-from-zos-encode-tests.yml | 3 - ...165-remove-zos-copy-dep-from-zos-fetch.yml | 3 - ...ve-zos-copy-from-zos-blockinfile-tests.yml | 3 - .../1169-util-job-zoau-migration.yml | 3 - ...e-zos_encode-from_zos_lineinfile-tests.yml | 3 - .../1181-zoau-migration-zos_operator.yml | 4 - .../1182-migrate-module-utils-data-set.yml | 3 - changelogs/fragments/1183-copy-members.yml | 3 - ...184-remove-zos-fetch-dep-from-zos-copy.yml | 3 - .../1187-migrate-module-utils-copy.yml | 3 - .../1188-migrate-module_utils-backup.yml | 3 - .../1189-migrate-module_utils-encode.yml | 3 - ...1190-migrate-module_utils-dd_statement.yml | 3 - .../1196-zoau-migration-zos_gather_facts.yml | 4 - .../1202-doc-gen-script-portability.yml | 4 - changelogs/fragments/1204-migrate-zos_apf.yml | 12 -- .../1209-zoau-migration-zos_job_submit.yml | 3 - ...1215-Migrate_zos_operator_action_query.yml | 
4 - ...lidate_module_zos_job_output_migration.yml | 3 - .../fragments/1217-validate-job-query.yml | 3 - .../fragments/1218-migrate-zos_encode.yml | 3 - ...20-bugfix-zos_job_submit-default_value.yml | 4 - .../1222-zoau-migration-zos_copy.yml | 3 - .../fragments/1227-migrate-zos_archive.yml | 3 - ...228-zos_find-remove-zos_lineinfile_dep.yml | 3 - .../fragments/1229-migrate-zos_fetch.yml | 3 - .../fragments/1237-migrate-zos_mount.yml | 4 - .../fragments/1238-migrate-zos_unarchive.yml | 3 - .../1242-zoau-migration-zos_data_set.yml | 3 - ...Migrate_zos_blockinfile_and_lineinfile.yml | 4 - .../fragments/1257-zoau-import-zos_apf.yml | 3 - .../1261-job-submit-non-utf8-chars.yml | 9 -- .../1265_Migrate_zos_backup_restore.yml | 7 -- ...0-quick-fix-len-of-volumes-work-around.yml | 5 - ...-update-zos_archive-zos_unarchive-docs.yml | 5 - .../1292-doc-zos_tso_command-example.yml | 4 - .../fragments/1295-doc-zos_ping-scp.yml | 7 -- ...98-Remove_local_charset_from_zos_fetch.yml | 3 - .../fragments/1307-update-sanity-zos_copy.yml | 10 -- .../1320-Zos_mvs_raw_ignores_tmp_hlq.yml | 5 - .../1322-update-docstring-encode.yml | 3 - .../1331-update-docstring-ickdsf.yml | 3 - .../1332-update-docstring-import_handler.yml | 3 - .../fragments/1333-update-docstring-job.yml | 3 - .../1336-update-docstring-validation.yml | 3 - .../1340-Work_around_fix_false_positive.yml | 4 - ...re_than_0_doesn_not_put_change_as_true.yml | 5 - .../1347-update-docstring-zos_data_set.yml | 3 - .../1348-update-docstring-zos_encode.yml | 3 - .../1349-update-docstring-zos_fetch.yml | 3 - .../1350-update-docstring-zos_find.yml | 3 - ...1351-update-docstring-zos_gather_facts.yml | 3 - .../1352-update-docstring-zos_job_output.yml | 3 - .../1353-update-docstring-zos_job_query.yml | 3 - .../1354-update-docstring-zos_job_submit.yml | 3 - .../1355-update-docstring-zos_lineinfile.yml | 3 - .../1356-update-docstring-zos_mount.yml | 3 - .../fragments/1388-lowercase-choices.yml | 106 ------------------ 
.../1390-update-docstring-zos_script.yml | 3 - .../1391-update-docstring-zos_tso_command.yml | 3 - .../1392-update-docstring-zos_volume_init.yml | 3 - .../1393-update-docstring-zos_apf.yml | 3 - ...te_docstring-zos_operator_action_query.yml | 3 - .../fragments/1443-zos_find-filter-size.yml | 4 - .../fragments/692-changelog-lint-ac-tool.yml | 8 -- .../971-bug-job_submit-can-stacktrace.yml | 6 - changelogs/fragments/992-fix-sanity4to6.yml | 7 -- .../fragments/v1.10.0-beta.1_summary.yml | 6 - 71 files changed, 379 deletions(-) delete mode 100644 changelogs/fragments/1032-clean-job_submit-test.yml delete mode 100644 changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml delete mode 100644 changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml delete mode 100644 changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml delete mode 100644 changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml delete mode 100644 changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml delete mode 100644 changelogs/fragments/1169-util-job-zoau-migration.yml delete mode 100644 changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml delete mode 100644 changelogs/fragments/1181-zoau-migration-zos_operator.yml delete mode 100644 changelogs/fragments/1182-migrate-module-utils-data-set.yml delete mode 100644 changelogs/fragments/1183-copy-members.yml delete mode 100644 changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml delete mode 100644 changelogs/fragments/1187-migrate-module-utils-copy.yml delete mode 100644 changelogs/fragments/1188-migrate-module_utils-backup.yml delete mode 100644 changelogs/fragments/1189-migrate-module_utils-encode.yml delete mode 100644 changelogs/fragments/1190-migrate-module_utils-dd_statement.yml delete mode 100644 changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml delete mode 100644 changelogs/fragments/1202-doc-gen-script-portability.yml delete mode 100644 
changelogs/fragments/1204-migrate-zos_apf.yml delete mode 100644 changelogs/fragments/1209-zoau-migration-zos_job_submit.yml delete mode 100644 changelogs/fragments/1215-Migrate_zos_operator_action_query.yml delete mode 100644 changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml delete mode 100644 changelogs/fragments/1217-validate-job-query.yml delete mode 100644 changelogs/fragments/1218-migrate-zos_encode.yml delete mode 100644 changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml delete mode 100644 changelogs/fragments/1222-zoau-migration-zos_copy.yml delete mode 100644 changelogs/fragments/1227-migrate-zos_archive.yml delete mode 100644 changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml delete mode 100644 changelogs/fragments/1229-migrate-zos_fetch.yml delete mode 100644 changelogs/fragments/1237-migrate-zos_mount.yml delete mode 100644 changelogs/fragments/1238-migrate-zos_unarchive.yml delete mode 100644 changelogs/fragments/1242-zoau-migration-zos_data_set.yml delete mode 100644 changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml delete mode 100644 changelogs/fragments/1257-zoau-import-zos_apf.yml delete mode 100644 changelogs/fragments/1261-job-submit-non-utf8-chars.yml delete mode 100644 changelogs/fragments/1265_Migrate_zos_backup_restore.yml delete mode 100644 changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml delete mode 100644 changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml delete mode 100644 changelogs/fragments/1292-doc-zos_tso_command-example.yml delete mode 100644 changelogs/fragments/1295-doc-zos_ping-scp.yml delete mode 100644 changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml delete mode 100644 changelogs/fragments/1307-update-sanity-zos_copy.yml delete mode 100644 changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml delete mode 100644 changelogs/fragments/1322-update-docstring-encode.yml delete mode 100644 
changelogs/fragments/1331-update-docstring-ickdsf.yml delete mode 100644 changelogs/fragments/1332-update-docstring-import_handler.yml delete mode 100644 changelogs/fragments/1333-update-docstring-job.yml delete mode 100644 changelogs/fragments/1336-update-docstring-validation.yml delete mode 100644 changelogs/fragments/1340-Work_around_fix_false_positive.yml delete mode 100644 changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml delete mode 100644 changelogs/fragments/1347-update-docstring-zos_data_set.yml delete mode 100644 changelogs/fragments/1348-update-docstring-zos_encode.yml delete mode 100644 changelogs/fragments/1349-update-docstring-zos_fetch.yml delete mode 100644 changelogs/fragments/1350-update-docstring-zos_find.yml delete mode 100644 changelogs/fragments/1351-update-docstring-zos_gather_facts.yml delete mode 100644 changelogs/fragments/1352-update-docstring-zos_job_output.yml delete mode 100644 changelogs/fragments/1353-update-docstring-zos_job_query.yml delete mode 100644 changelogs/fragments/1354-update-docstring-zos_job_submit.yml delete mode 100644 changelogs/fragments/1355-update-docstring-zos_lineinfile.yml delete mode 100644 changelogs/fragments/1356-update-docstring-zos_mount.yml delete mode 100644 changelogs/fragments/1388-lowercase-choices.yml delete mode 100644 changelogs/fragments/1390-update-docstring-zos_script.yml delete mode 100644 changelogs/fragments/1391-update-docstring-zos_tso_command.yml delete mode 100644 changelogs/fragments/1392-update-docstring-zos_volume_init.yml delete mode 100644 changelogs/fragments/1393-update-docstring-zos_apf.yml delete mode 100644 changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml delete mode 100644 changelogs/fragments/1443-zos_find-filter-size.yml delete mode 100644 changelogs/fragments/692-changelog-lint-ac-tool.yml delete mode 100644 changelogs/fragments/971-bug-job_submit-can-stacktrace.yml delete mode 100644 
changelogs/fragments/992-fix-sanity4to6.yml delete mode 100644 changelogs/fragments/v1.10.0-beta.1_summary.yml diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml deleted file mode 100644 index bb4248aec..000000000 --- a/changelogs/fragments/1032-clean-job_submit-test.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. - (https://github.com/ansible-collections/ibm_zos_core/pull/1364). diff --git a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml deleted file mode 100644 index 44015bbd9..000000000 --- a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - remove zos_copy calls from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1152). diff --git a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml deleted file mode 100644 index ea8aacee9..000000000 --- a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - Remove zos_copy dependency from zos_archive test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1156). \ No newline at end of file diff --git a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml deleted file mode 100644 index 24f2802d5..000000000 --- a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - Remove zos_copy dependency from zos_encode test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1157). 
diff --git a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml deleted file mode 100644 index 9c8593c1a..000000000 --- a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove zos_copy dependency from zos_fetch test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1165). diff --git a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml deleted file mode 100644 index d7fb725af..000000000 --- a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_blockinfile - Remove zos_copy dependency from zos_blockinfile test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1167). \ No newline at end of file diff --git a/changelogs/fragments/1169-util-job-zoau-migration.yml b/changelogs/fragments/1169-util-job-zoau-migration.yml deleted file mode 100644 index 568aa9a4e..000000000 --- a/changelogs/fragments/1169-util-job-zoau-migration.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/job.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1169). diff --git a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml deleted file mode 100644 index a95e1c7e2..000000000 --- a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - Remove zos_encode dependency from zos_lineinfile test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1179). 
\ No newline at end of file diff --git a/changelogs/fragments/1181-zoau-migration-zos_operator.yml b/changelogs/fragments/1181-zoau-migration-zos_operator.yml deleted file mode 100644 index 7c107de88..000000000 --- a/changelogs/fragments/1181-zoau-migration-zos_operator.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_operator - Update internal functions to account for the change to the - unit of measurement of `timeout` now in centiseconds. - (https://github.com/ansible-collections/ibm_zos_core/pull/1181). \ No newline at end of file diff --git a/changelogs/fragments/1182-migrate-module-utils-data-set.yml b/changelogs/fragments/1182-migrate-module-utils-data-set.yml deleted file mode 100644 index 857327254..000000000 --- a/changelogs/fragments/1182-migrate-module-utils-data-set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/data_set.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1182). diff --git a/changelogs/fragments/1183-copy-members.yml b/changelogs/fragments/1183-copy-members.yml deleted file mode 100644 index b0b0c7896..000000000 --- a/changelogs/fragments/1183-copy-members.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. - (https://github.com/ansible-collections/ibm_zos_core/pull/1183). diff --git a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml deleted file mode 100644 index 9085743d9..000000000 --- a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Remove zos_fetch dependency from zos_copy test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1184). 
diff --git a/changelogs/fragments/1187-migrate-module-utils-copy.yml b/changelogs/fragments/1187-migrate-module-utils-copy.yml deleted file mode 100644 index 26157f9fc..000000000 --- a/changelogs/fragments/1187-migrate-module-utils-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/copy.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1187). diff --git a/changelogs/fragments/1188-migrate-module_utils-backup.yml b/changelogs/fragments/1188-migrate-module_utils-backup.yml deleted file mode 100644 index 65945d06b..000000000 --- a/changelogs/fragments/1188-migrate-module_utils-backup.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/backup.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1188). \ No newline at end of file diff --git a/changelogs/fragments/1189-migrate-module_utils-encode.yml b/changelogs/fragments/1189-migrate-module_utils-encode.yml deleted file mode 100644 index d7f471847..000000000 --- a/changelogs/fragments/1189-migrate-module_utils-encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/encode.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1189). diff --git a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml deleted file mode 100644 index 4bb3a582d..000000000 --- a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/dd_statement.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1190). 
diff --git a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml deleted file mode 100644 index 03f39b535..000000000 --- a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_gather_facts - Update module internally to leverage ZOAU python API - for zinfo. - (https://github.com/ansible-collections/ibm_zos_core/pull/1196). \ No newline at end of file diff --git a/changelogs/fragments/1202-doc-gen-script-portability.yml b/changelogs/fragments/1202-doc-gen-script-portability.yml deleted file mode 100644 index 3c2e6ddbb..000000000 --- a/changelogs/fragments/1202-doc-gen-script-portability.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - docs/scripts - Change to sed "-i" in place option which ensures compatibility between MacOS - and GNU versions of sed command. - (https://github.com/ansible-collections/ibm_zos_core/pull/1202). diff --git a/changelogs/fragments/1204-migrate-zos_apf.yml b/changelogs/fragments/1204-migrate-zos_apf.yml deleted file mode 100644 index 89db1abd2..000000000 --- a/changelogs/fragments/1204-migrate-zos_apf.yml +++ /dev/null @@ -1,12 +0,0 @@ -bugfixes: - - zos_apf - List option only returned one data set. Fix now returns - the list of retrieved data sets. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - -minor_changes: - - zos_apf - Enhanced error messages when an exception is caught. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - -trivial: - - zos_apf - Migrated the module to use ZOAU v1.3.0 json schema. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). 
\ No newline at end of file diff --git a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml deleted file mode 100644 index 6f58e2713..000000000 --- a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Migrated the module to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1209). \ No newline at end of file diff --git a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml deleted file mode 100644 index be18056b3..000000000 --- a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_operator_action_query - Update internal functions to account for the change to the - unit of measurement of `timeout` now in centiseconds. - (https://github.com/ansible-collections/ibm_zos_core/pull/1215). \ No newline at end of file diff --git a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml deleted file mode 100644 index 65d3d3c08..000000000 --- a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_output - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1216). diff --git a/changelogs/fragments/1217-validate-job-query.yml b/changelogs/fragments/1217-validate-job-query.yml deleted file mode 100644 index df97c3ca6..000000000 --- a/changelogs/fragments/1217-validate-job-query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_query - Removed zos_job_submit wait argument from tests. - (https://github.com/ansible-collections/ibm_zos_core/pull/1217). 
\ No newline at end of file diff --git a/changelogs/fragments/1218-migrate-zos_encode.yml b/changelogs/fragments/1218-migrate-zos_encode.yml deleted file mode 100644 index 3d712b749..000000000 --- a/changelogs/fragments/1218-migrate-zos_encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1218). diff --git a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml deleted file mode 100644 index 83d2391ba..000000000 --- a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_job_submit - Was ignoring the default value for location=DATA_SET, now - when location is not specified it will default to DATA_SET. - (https://github.com/ansible-collections/ibm_zos_core/pull/1220). \ No newline at end of file diff --git a/changelogs/fragments/1222-zoau-migration-zos_copy.yml b/changelogs/fragments/1222-zoau-migration-zos_copy.yml deleted file mode 100644 index edc6eec06..000000000 --- a/changelogs/fragments/1222-zoau-migration-zos_copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Migrated the module to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1222). \ No newline at end of file diff --git a/changelogs/fragments/1227-migrate-zos_archive.yml b/changelogs/fragments/1227-migrate-zos_archive.yml deleted file mode 100644 index 820593c95..000000000 --- a/changelogs/fragments/1227-migrate-zos_archive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1227). 
diff --git a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml deleted file mode 100644 index 67642d563..000000000 --- a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_find - Removed zos_lineinfile dependency from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1228). diff --git a/changelogs/fragments/1229-migrate-zos_fetch.yml b/changelogs/fragments/1229-migrate-zos_fetch.yml deleted file mode 100644 index 07f9a26b4..000000000 --- a/changelogs/fragments/1229-migrate-zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1229). diff --git a/changelogs/fragments/1237-migrate-zos_mount.yml b/changelogs/fragments/1237-migrate-zos_mount.yml deleted file mode 100644 index d4787d42d..000000000 --- a/changelogs/fragments/1237-migrate-zos_mount.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - tests/functional/modules/test_zos_mount_func.py - migrate code to use - ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1237). diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml deleted file mode 100644 index 8afe97d29..000000000 --- a/changelogs/fragments/1238-migrate-zos_unarchive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1238). 
\ No newline at end of file diff --git a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml deleted file mode 100644 index 851783900..000000000 --- a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_data_set - Refactor data_set module_util and functional tests for ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1242). \ No newline at end of file diff --git a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml deleted file mode 100644 index e2e841e9c..000000000 --- a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_lineinfile - migrate code to use ZOAU v1.3.0. - - zos_blockinfile - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1256). diff --git a/changelogs/fragments/1257-zoau-import-zos_apf.yml b/changelogs/fragments/1257-zoau-import-zos_apf.yml deleted file mode 100644 index 71b46ba1b..000000000 --- a/changelogs/fragments/1257-zoau-import-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated ZOAU imports from the module to capture traceback. - (https://github.com/ansible-collections/ibm_zos_core/pull/1257). diff --git a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml deleted file mode 100644 index 7f322afe4..000000000 --- a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml +++ /dev/null @@ -1,9 +0,0 @@ -bugfixes: - - module_utils/job.py - job output containing non-printable characters would - crash modules. Fix now handles the error gracefully and returns a message - to the user inside `content` of the `ddname` that failed. - (https://github.com/ansible-collections/ibm_zos_core/pull/1261). 
-trivial: - - zos_job_submit - add test case to validate a bugfix in ZOAU v1.3.0 that - handles non-UTF8 characters correctly in a job's output. - (https://github.com/ansible-collections/ibm_zos_core/pull/1261). \ No newline at end of file diff --git a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml deleted file mode 100644 index 9afe4afc3..000000000 --- a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_backup_restore - Refactor zos_backup_restore module and functional tests for ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1265). -minor_changes: - - zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier - (HLQ) for temporary and backup. - (https://github.com/ansible-collections/ibm_zos_core/pull/1265). \ No newline at end of file diff --git a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml deleted file mode 100644 index 1f6ba201d..000000000 --- a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - module_utils/data_set.py - len(volme) was always called on receiving - DatasetVerificationError from Dataset.create() even though volumes=None was - a valid possible outcome. The fix adds a null check to the conditional. - (https://github.com/ansible-collections/ibm_zos_core/pull/1270). \ No newline at end of file diff --git a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml deleted file mode 100644 index ef213b06f..000000000 --- a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - zos_archive - Updated examples to use path instead of src. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1286). - - zos_unarchive - Updated examples and return dict to use path instead of src. - (https://github.com/ansible-collections/ibm_zos_core/pull/1286). \ No newline at end of file diff --git a/changelogs/fragments/1292-doc-zos_tso_command-example.yml b/changelogs/fragments/1292-doc-zos_tso_command-example.yml deleted file mode 100644 index 6ed868be7..000000000 --- a/changelogs/fragments/1292-doc-zos_tso_command-example.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_tso_command - Added an example on how to chain multiple TSO commands such - that they are invoked together when dependent on each other. - (https://github.com/ansible-collections/ibm_zos_core/pull/1293). \ No newline at end of file diff --git a/changelogs/fragments/1295-doc-zos_ping-scp.yml b/changelogs/fragments/1295-doc-zos_ping-scp.yml deleted file mode 100644 index a9477150d..000000000 --- a/changelogs/fragments/1295-doc-zos_ping-scp.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_ping - Update zos_ping documentation to instruct users how - to fall back to legacy SCP when using OpenSSH 9.0 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/1295). - - zos_ping - Update zos_ping REXX source to check for python - version 3.10 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/1295). \ No newline at end of file diff --git a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml deleted file mode 100644 index ca1ea840e..000000000 --- a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove argument not documented. - (https://github.com/ansible-collections/ibm_zos_core/pull/1298). 
\ No newline at end of file diff --git a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml deleted file mode 100644 index 858f0b64c..000000000 --- a/changelogs/fragments/1307-update-sanity-zos_copy.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_copy - Documented `group` and `owner` options. - (https://github.com/ansible-collections/ibm_zos_core/pull/1307). - -trivial: - - zos_copy - Removed many of the variables that were passed from the - action plugin to the module, reimplementing the logic inside the - module instead. Removed the use of temp_path variable inside zos_copy - in favor of using remote_src to deal with files copied to remote. - (https://github.com/ansible-collections/ibm_zos_core/pull/1307). \ No newline at end of file diff --git a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml deleted file mode 100644 index 058faf66e..000000000 --- a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. - Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created - during the module execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/changelogs/fragments/1322-update-docstring-encode.yml b/changelogs/fragments/1322-update-docstring-encode.yml deleted file mode 100644 index dd5eb5389..000000000 --- a/changelogs/fragments/1322-update-docstring-encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - encode - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1322). 
\ No newline at end of file diff --git a/changelogs/fragments/1331-update-docstring-ickdsf.yml b/changelogs/fragments/1331-update-docstring-ickdsf.yml deleted file mode 100644 index 545ba95c1..000000000 --- a/changelogs/fragments/1331-update-docstring-ickdsf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - ickdsf - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1331). \ No newline at end of file diff --git a/changelogs/fragments/1332-update-docstring-import_handler.yml b/changelogs/fragments/1332-update-docstring-import_handler.yml deleted file mode 100644 index 5b32cd32e..000000000 --- a/changelogs/fragments/1332-update-docstring-import_handler.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - import_handler - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1332). \ No newline at end of file diff --git a/changelogs/fragments/1333-update-docstring-job.yml b/changelogs/fragments/1333-update-docstring-job.yml deleted file mode 100644 index 124ef2cae..000000000 --- a/changelogs/fragments/1333-update-docstring-job.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - job - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1333). \ No newline at end of file diff --git a/changelogs/fragments/1336-update-docstring-validation.yml b/changelogs/fragments/1336-update-docstring-validation.yml deleted file mode 100644 index 547103d46..000000000 --- a/changelogs/fragments/1336-update-docstring-validation.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - validation - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1336). 
\ No newline at end of file diff --git a/changelogs/fragments/1340-Work_around_fix_false_positive.yml b/changelogs/fragments/1340-Work_around_fix_false_positive.yml deleted file mode 100644 index 8e8360808..000000000 --- a/changelogs/fragments/1340-Work_around_fix_false_positive.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_blockinfile - Using double quotation marks inside a block resulted in a false - positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. - (https://github.com/ansible-collections/ibm_zos_core/pull/1340). \ No newline at end of file diff --git a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml deleted file mode 100644 index a09b8fa64..000000000 --- a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_job_submit - when the argument max_rc was different than 0 the changed response returned - as false. Fix now return a changed response as true when the rc is not 0 and max_rc is above - or equal to the value of the job. - (https://github.com/ansible-collections/ibm_zos_core/pull/1345). \ No newline at end of file diff --git a/changelogs/fragments/1347-update-docstring-zos_data_set.yml b/changelogs/fragments/1347-update-docstring-zos_data_set.yml deleted file mode 100644 index 581ab1aa9..000000000 --- a/changelogs/fragments/1347-update-docstring-zos_data_set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_data_set - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1347). 
\ No newline at end of file diff --git a/changelogs/fragments/1348-update-docstring-zos_encode.yml b/changelogs/fragments/1348-update-docstring-zos_encode.yml deleted file mode 100644 index de9c11c17..000000000 --- a/changelogs/fragments/1348-update-docstring-zos_encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1348). \ No newline at end of file diff --git a/changelogs/fragments/1349-update-docstring-zos_fetch.yml b/changelogs/fragments/1349-update-docstring-zos_fetch.yml deleted file mode 100644 index a38504c36..000000000 --- a/changelogs/fragments/1349-update-docstring-zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1349). \ No newline at end of file diff --git a/changelogs/fragments/1350-update-docstring-zos_find.yml b/changelogs/fragments/1350-update-docstring-zos_find.yml deleted file mode 100644 index 48c1fbce1..000000000 --- a/changelogs/fragments/1350-update-docstring-zos_find.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_find - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1350). \ No newline at end of file diff --git a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml deleted file mode 100644 index 31fe8dfda..000000000 --- a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_gather_facts - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1351). 
\ No newline at end of file diff --git a/changelogs/fragments/1352-update-docstring-zos_job_output.yml b/changelogs/fragments/1352-update-docstring-zos_job_output.yml deleted file mode 100644 index 78aac0cac..000000000 --- a/changelogs/fragments/1352-update-docstring-zos_job_output.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_output - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1352). \ No newline at end of file diff --git a/changelogs/fragments/1353-update-docstring-zos_job_query.yml b/changelogs/fragments/1353-update-docstring-zos_job_query.yml deleted file mode 100644 index 550be9107..000000000 --- a/changelogs/fragments/1353-update-docstring-zos_job_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_query - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1353). \ No newline at end of file diff --git a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml deleted file mode 100644 index c2c0a4b99..000000000 --- a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1354). \ No newline at end of file diff --git a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml deleted file mode 100644 index 3840b2862..000000000 --- a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1355). 
\ No newline at end of file diff --git a/changelogs/fragments/1356-update-docstring-zos_mount.yml b/changelogs/fragments/1356-update-docstring-zos_mount.yml deleted file mode 100644 index a2c09caa5..000000000 --- a/changelogs/fragments/1356-update-docstring-zos_mount.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_mount - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1356). \ No newline at end of file diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml deleted file mode 100644 index e181d8b0f..000000000 --- a/changelogs/fragments/1388-lowercase-choices.yml +++ /dev/null @@ -1,106 +0,0 @@ -breaking_changes: - - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase - choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_backup_restore - option ``space_type`` no longer accepts uppercase - choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``space_type`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``record_format`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should - replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_submit - option ``location`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``fs_type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``automove`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of - ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. - This also applies when defining a ``dd_data_set`` inside ``dd_concat``. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - -trivial: - - zos_blockinfile - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_find - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_lineinfile - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_encode - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_fetch - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_output - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_query - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
\ No newline at end of file diff --git a/changelogs/fragments/1390-update-docstring-zos_script.yml b/changelogs/fragments/1390-update-docstring-zos_script.yml deleted file mode 100644 index 792bf9698..000000000 --- a/changelogs/fragments/1390-update-docstring-zos_script.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_script - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1390). \ No newline at end of file diff --git a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml deleted file mode 100644 index c435799d4..000000000 --- a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_tso_command - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1391). \ No newline at end of file diff --git a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml deleted file mode 100644 index 4536f186c..000000000 --- a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_volume_init - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1392). \ No newline at end of file diff --git a/changelogs/fragments/1393-update-docstring-zos_apf.yml b/changelogs/fragments/1393-update-docstring-zos_apf.yml deleted file mode 100644 index 8a89b7aa0..000000000 --- a/changelogs/fragments/1393-update-docstring-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1393). 
\ No newline at end of file diff --git a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml deleted file mode 100644 index 25c34fd89..000000000 --- a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator_action_query - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1394). \ No newline at end of file diff --git a/changelogs/fragments/1443-zos_find-filter-size.yml b/changelogs/fragments/1443-zos_find-filter-size.yml deleted file mode 100644 index a5a8ce029..000000000 --- a/changelogs/fragments/1443-zos_find-filter-size.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size - for PDS/Es. - (https://github.com/ansible-collections/ibm_zos_core/pull/1443). \ No newline at end of file diff --git a/changelogs/fragments/692-changelog-lint-ac-tool.yml b/changelogs/fragments/692-changelog-lint-ac-tool.yml deleted file mode 100644 index cbf6bab7d..000000000 --- a/changelogs/fragments/692-changelog-lint-ac-tool.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - ac - Added new command ac-changelog into ac tool to run changelog - fragments lint and changelog release generation. - (https://github.com/ansible-collections/ibm_zos_core/pull/1304). - - - workflows/ac_changelog - Added new github action that will lint - changelog fragments upon a new pull request. - (https://github.com/ansible-collections/ibm_zos_core/pull/1304). 
\ No newline at end of file diff --git a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml deleted file mode 100644 index e02daed4c..000000000 --- a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml +++ /dev/null @@ -1,6 +0,0 @@ -trivial: - - job.py - generalized resolution of query_exception that may be thrown. - This should prevent the stack trace. - (https://github.com/ansible-collections/ibm_zos_core/pull/1383). - - test_zos_job_submit.py - Removed code that was hiding if a duration was not returned. - (https://github.com/ansible-collections/ibm_zos_core/pull/1383). diff --git a/changelogs/fragments/992-fix-sanity4to6.yml b/changelogs/fragments/992-fix-sanity4to6.yml deleted file mode 100644 index 3d9637c63..000000000 --- a/changelogs/fragments/992-fix-sanity4to6.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_data_set.py - Corrected references to input variable definitions - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). - - data_set.py - Updated exception handler to match what was returned. - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). - - test_zos_data_set_func.py - Removed test of discontinued function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). diff --git a/changelogs/fragments/v1.10.0-beta.1_summary.yml b/changelogs/fragments/v1.10.0-beta.1_summary.yml deleted file mode 100644 index 0c1e35217..000000000 --- a/changelogs/fragments/v1.10.0-beta.1_summary.yml +++ /dev/null @@ -1,6 +0,0 @@ -release_summary: | - Release Date: '2024-05-08' - This changelog describes all changes made to the modules and plugins included - in this collection. The release date is the date the changelog is created. 
- For additional details such as required dependencies and availability review - the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From 31fcb4402a1fd245b2411558354c125d68b759d9 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 10:33:50 -0700 Subject: [PATCH 385/413] Updated column to include GA and reflect correct date for 1.10 Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index f304cba93..7b0259c4a 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -5,7 +5,7 @@ Releases and maintenance This table describes the collections release dates, dependency versions and End of Life dates (EOL). The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally each quarter -a beta is released followed by a GA version.We can extend this cycle to properly implement and test larger +a beta is released followed by a GA version. We can extend this cycle to properly implement and test larger changes before a new release is made available. These are the component versions available when the collection is made generally available. The underlying @@ -18,9 +18,9 @@ enters into EOL, then a newer version of ansible-core must be used. 
Support Matrix ============== +---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| Version | Released | ansible-core | Ansible | AAP | End of Life | Control Node | Managed Node | +| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node | Managed Node | +=========+===============+==============+=========+=======+===============+====================+============================================================================+ -| 1.10.x | 16 May 2024 | >=2.15.x | >=8.0.x | >=2.4 | 16 May 2026 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | | | | | | | | | - z/OS shell | | | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | | | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or later | From 0ce6db68f48419da232e3af050d9dd179f955117 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 10:34:24 -0700 Subject: [PATCH 386/413] Update doc copyright year Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/requirements-single.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst index e31c9636a..a230b9af6 100644 --- a/docs/source/requirements-single.rst +++ b/docs/source/requirements-single.rst @@ -2,7 +2,7 @@ .. Auto generated restructured text . .. ........................................................................... .. ........................................................................... -.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2024 . .. ........................................................................... ============ @@ -32,7 +32,6 @@ The managed z/OS node is the host that is managed by Ansible, as identified in the Ansible inventory. 
For the **IBM z/OS core collection** to manage the z/OS node, some dependencies are required to be installed on z/OS such as: -* `z/OS`_ * `z/OS`_ * `z/OS OpenSSH`_ * `z/OS® shell`_ From 19580a4a3ed16061a733161d0f0f2cb55e5a8adb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 10:35:12 -0700 Subject: [PATCH 387/413] Update copyright year to 2024 and correct formatting for others Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/action/zos_copy.py | 2 +- plugins/action/zos_job_submit.py | 2 +- plugins/action/zos_unarchive.py | 2 +- plugins/filter/wtor.py | 2 +- plugins/module_utils/copy.py | 2 +- plugins/module_utils/dd_statement.py | 2 +- plugins/module_utils/ickdsf.py | 2 +- plugins/module_utils/import_handler.py | 2 +- plugins/module_utils/validation.py | 2 +- plugins/module_utils/zos_mvs_raw.py | 2 +- plugins/modules/zos_job_query.py | 2 +- tests/functional/modules/test_zos_apf_func.py | 2 +- tests/functional/modules/test_zos_archive_func.py | 2 +- tests/functional/modules/test_zos_copy_func.py | 2 +- tests/functional/modules/test_zos_data_set_func.py | 2 +- tests/functional/modules/test_zos_encode_func.py | 2 +- tests/functional/modules/test_zos_fetch_func.py | 2 +- tests/functional/modules/test_zos_find_func.py | 2 +- tests/functional/modules/test_zos_gather_facts_func.py | 2 +- tests/functional/modules/test_zos_job_output_func.py | 2 +- tests/functional/modules/test_zos_mvs_raw_func.py | 2 +- tests/functional/modules/test_zos_operator_func.py | 2 +- 22 files changed, 22 insertions(+), 22 deletions(-) diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index e177cadd6..d76a7032d 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 23c31cb95..67047b648 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index ed508bcf0..b0a1fa466 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py index 2ef3a3cbb..483fbdb73 100644 --- a/plugins/filter/wtor.py +++ b/plugins/filter/wtor.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 71b47c974..68e2e8385 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index 91ae2a37a..b052f2574 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py index 436750c21..7081e2163 100644 --- a/plugins/module_utils/ickdsf.py +++ b/plugins/module_utils/ickdsf.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py index 507dd2f65..7b5031216 100644 --- a/plugins/module_utils/import_handler.py +++ b/plugins/module_utils/import_handler.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py index fe41c0a01..a645d3362 100644 --- a/plugins/module_utils/validation.py +++ b/plugins/module_utils/validation.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 466775939..bc865d098 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index fee64bdb1..be2bb513f 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 725eff59d..7c19ea31a 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index f6b1140fa..e01994138 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index d3c685fad..086b7d27e 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 9a8880f02..7dc0ed7f3 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 4b74c8834..df01a6133 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 4d72a6cc5..7fd44651e 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 42a8db23e..067a2f192 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_gather_facts_func.py b/tests/functional/modules/test_zos_gather_facts_func.py index f2861c596..0d28b8f25 100644 --- a/tests/functional/modules/test_zos_gather_facts_func.py +++ b/tests/functional/modules/test_zos_gather_facts_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 - 2024 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index e92d377d4..96bc0b2bc 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index aa01ed952..3e97f6026 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 6891cffa8..d60d26ec2 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 7df715663c756f781f0f6d37d96122d237735ee6 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 11:18:21 -0700 Subject: [PATCH 388/413] Update test case comment with JIRA info Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_lineinfile_func.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index cd1421f41..a9a29227d 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -675,7 +675,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): finally: remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): @@ -698,7 +698,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): # finally: # remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): @@ -721,7 +721,7 @@ 
def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): # finally: # remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): @@ -744,7 +744,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): # finally: # remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): From 6a2efd75f8c54da801087e5fc748a46772acf23c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 11:31:52 -0700 Subject: [PATCH 389/413] Update README to add tittle Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index e8f8f2879..6798d19e9 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ -# Title of Collection -The **IBM® z/OS® core collection** enables Ansible to interact with z/OS Data Sets and USS files. The collection +# IBM® z/OS® core collection +The **IBM z/OS core collection** enables Ansible to interact with z/OS Data Sets and USS files. The collection focuses on operating system fundamental operations such as managing encodings, creating data sets and submitting jobs. ### Description -The **IBM® z/OS® core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible Automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, program authorizations, operator operations and execute both JES and MVS commands as well as execute shell, python and REXX scripts. It supports data set creation, searching, copying, fetching and encoding. 
It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. +The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible Automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, program authorizations, operator operations and execute both JES and MVS commands as well as execute shell, python and REXX scripts. It supports data set creation, searching, copying, fetching and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. System programers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. From 99213d4b677861af7535cdc9d8824f35ed50f352 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 13:15:05 -0700 Subject: [PATCH 390/413] Update Readme with suggestions Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 58 +++++++++++++++++++++++++------------------------------ 1 file changed, 26 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index 6798d19e9..a5d81fa50 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,14 @@ # IBM® z/OS® core collection -The **IBM z/OS core collection** enables Ansible to interact with z/OS Data Sets and USS files. The collection -focuses on operating system fundamental operations such as managing encodings, creating data sets and submitting -jobs. +The **IBM z/OS core** collection enables Ansible to interact with z/OS Data Sets and USS files. The collection focuses on operating system fundamental operations such as managing encodings, creating data sets, and submitting jobs. 
### Description -The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible Automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, program authorizations, operator operations and execute both JES and MVS commands as well as execute shell, python and REXX scripts. It supports data set creation, searching, copying, fetching and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. +The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, perform program authorizations, run operator operations, and execute both JES and MVS commands as well as execute shell, python, and REXX scripts. It supports data set creation, searching, copying, fetching, and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. -System programers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. +System programmers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. ## Requirements Before you install the IBM z/OS core collection, you must configure a control node and managed node with a minimum set of requirements. 
-The following [table](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html) details the specific software requirements for the controller and managed Node. +The following [table](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html) details the specific software requirements for the controller and managed node. ### Ansible Controller * This release of the collection requires **ansible-core >=2.15** (Ansible >=8.x), for additional requirements such as Python, review the [support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). @@ -22,7 +20,7 @@ This release of the collection requires the following: * [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. * [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau) 1.3.0 or later. -### Installation +## Installation Before using this collection, you need to install it with the Ansible Galaxy command-line tool: ``` @@ -50,7 +48,7 @@ You can also install a specific version of the collection, for example, if you n ansible-galaxy collection install ibm.ibm_zos_core:1.0.0 ``` -You can also install a beta version of the collection. A Beta version is only available on Galaxy and is only supported by community until it is promoted to Ansible Automation Platform. Use the following syntax to install a beta version: +You can also install a beta version of the collection. A beta version is only available on Galaxy and is only supported by the community until it is promoted to General Availability (GA). Use the following syntax to install a beta version: ``` ansible-galaxy collection install ibm.ibm_zos_core:1.10.0-beta.1 @@ -82,27 +80,25 @@ environment_vars: ``` ## Use Cases -This section should outline in detail 3-5 common use cases for the collection. 
These should be informative examples of how the collection has been used, or how you'd like to see it be used. - * Use Case Name: Add a new z/OS User * Actors: * Application Developer * Description: * An application developer can submit a new user request for the system admin to approve. * Flow: - * Verify user does not exist; create home directory, password and passphrase + * Verify user does not exist; create home directory, password, and passphrase * Create home directory and the user to the system - * Provide access to resource, add to system groups and define an alias + * Provide access to resource, add to system groups, and define an alias * Create the users ISPROF data set - * Create user private data set, mount with persistance + * Create user private data set, mount with persistence * Generate email with login credentials * Use Case Name: Automate certificate renewals * Actors: * System Admin * Description: - * The system admin can automate certificate renewals, no longer requiring manual intervention. + * The system administrator can automate certificate renewals * Flow: - * Setup and configure and run z/OS Health Checker to generate a report + * Setup, configure and run z/OS Health Checker to generate a report * Search the Health Checker report for expiring certificates * Renew expiring certificates * Collect expiring certificate attributes and backup certificate @@ -114,21 +110,20 @@ This section should outline in detail 3-5 common use cases for the collection. T * Actors: * Application Developer * Description: - * An application developer can provision an application runtime that accelerates the delivery of cloud-native applications, + * An application developer can provision an application runtime that accelerates the delivery of cloud-native applications. * Flow: * Create and mount a file system for the Liberty profile. - * Creating a Liberty Profile instance with optional configurations. - * Enabling z/OS authorized services for the Liberty profile. 
- * Starting an angel process or a server process + * Create a Liberty Profile instance with optional configurations. + * Enable z/OS authorized services for the Liberty profile. + * Start an angel process or a server process -### Testing -All releases, including beta's will have: +## Testing +All releases, including betas will have: * 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. * 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of [ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests). -* 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst) analyzes +* 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst). * 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/) allowing only false positives. -### Environments This release of the collection was tested with: * ansible-core v2.15.x * Python 3.9.x @@ -136,8 +131,7 @@ This release of the collection was tested with: * IBM Z Open Automation Utilities (ZOAU) 1.3.0.x * z/OS V2R5 -### Known Exceptions -This release of the collection has no known exceptions or workarounds, but this release does introduce case sensitivity for option values. This release includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values should be migrated to ensure case sensitivity does not affect a module. +This release introduces case sensitivity for option values and includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values will need to be updated. ## Contributing This community is not currently accepting contributions. 
However, we encourage you to open [git issues](https://github.com/ansible-collections/ibm_zos_core/issues) for bugs, comments or feature requests and check back periodically for when community contributions will be accepted in the near future. @@ -149,25 +143,25 @@ If you would like to communicate with this community, you can do so through: * GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). * GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). * [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. -* Discord [System Z Enthusiasts](https://forum.ansible.com/) room [Ansible](https://discord.gg/nKC8F89v). -* Matrix Ansible room [Ansible z/OS](#ansible-zos:matrix.org). -* Ansible community [Matrix rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). +* Discord [System Z Enthusiasts](https://forum.ansible.com/) room [ansible](https://discord.gg/nKC8F89v). +* Matrix Ansible room [ansible-zos](#ansible-zos:matrix.org). +* Ansible community Matrix [rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). ## Support -As Red Hat Ansible [Certified Content](https://catalog.redhat.com/software/search?target_platforms=Red%20Hat%20Ansible%20Automation%20Platform), this collection is entitled to [support](https://access.redhat.com/support/) through [Ansible Automation platform](https://www.redhat.com/en/technologies/management/ansible) (AAP). After creating a Red Hat support case, if it is decided the issue belongs to IBM, Red Hat will ask that [an IBM support case](https://www.ibm.com/mysupport/s/createrecord/NewCase) be created and share the case number with Red Hat so that a collaboration can begin between Red Hat and IBM. 
+As Red Hat Ansible [Certified Content](https://catalog.redhat.com/software/search?target_platforms=Red%20Hat%20Ansible%20Automation%20Platform), this collection is entitled to [support](https://access.redhat.com/support/) through [Ansible Automation Platform](https://www.redhat.com/en/technologies/management/ansible) (AAP). After creating a Red Hat support case, if it is determined the issue belongs to IBM, Red Hat will instruct you to create an [IBM support case](https://www.ibm.com/mysupport/s/createrecord/NewCase) and share the case number with Red Hat so that a collaboration can begin between Red Hat and IBM. -If a support case can not be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible). +If a support case cannot be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible). 
The current supported versions of this collection can be found listed under the [release section](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). ## Release Notes and Roadmap -The collections cumulative release notes can be reviewed [here](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). Note, some collections release before an ansible-core version reaches End of Life (EOL), thus the version of ansible-core that is supported must be a version that is currently supported. +The collection's cumulative release notes can be reviewed [here](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). Note, some collections release before an ansible-core version reaches End of Life (EOL), thus the version of ansible-core that is supported must be a version that is currently supported. For AAP users, to see the supported ansible-core versions, review the [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform). For Galaxy and GitHub users, to see the supported ansible-core versions, review the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). -The collections changelogs can be reviewed in the following table. +The collection's changelogs can be reviewed in the following table. 
| Version | ansible-core | Ansible | Status | |---------|--------------|---------|----------------------------| | 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| From dde3bb589beab6a57276e8d2c97deda72292d414 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 13:20:54 -0700 Subject: [PATCH 391/413] update readme to reflect preview status for a beta Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a5d81fa50..85d2f3104 100644 --- a/README.md +++ b/README.md @@ -165,7 +165,7 @@ The collection's changelogs can be reviewed in the following table. | Version | ansible-core | Ansible | Status | |---------|--------------|---------|----------------------------| | 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| -| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | May 2024 | +| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | In Preview | | [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 | | [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 | | [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 | From 0214e88e483f91270a6e79a8744d76522302a02c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 13:21:35 -0700 Subject: [PATCH 392/413] update readme to reflect preview status for a beta Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 85d2f3104..fd34de952 100644 --- a/README.md +++ b/README.md @@ -165,7 +165,7 @@ The collection's changelogs can be 
reviewed in the following table. | Version | ansible-core | Ansible | Status | |---------|--------------|---------|----------------------------| | 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| -| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | In Preview | +| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | In preview | | [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 | | [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 | | [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 | From af377bcefe93d302067b6afa3e8fd3e2558cc6d2 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 15:35:02 -0700 Subject: [PATCH 393/413] Update README with installation updates Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index fd34de952..90df543a0 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ Before using this collection, you need to install it with the Ansible Galaxy com ansible-galaxy collection install ibm.ibm_zos_core ``` -You can also include it in a requirements.yml file and install it with ansible-galaxy collection install -r requirements.yml, using the format: +You can also include it in a requirements.yml file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format: ``` collections: @@ -58,17 +58,27 @@ As part of the installation, the collection [requirements](#Requirements) must b If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed 
[here](https://github.com/ansible-collections/ibm_zos_core/discussions/657). +To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method). If the wheel is installed using the `--target` option, it will install the package into the specified directory, if the wheel is installed using the `--user` option, it will install the package into the user directory which will then need to have `PYTHONPATH` configured to where the packages is installed, e.g; `PYTHONPATH: /u/user`. + +If the ZOAU Python wheel package is installed using either `--target` or `--user`, uncomment the following line in the environment vars section. +``` +ZOAU_PYTHONPATH: "{{ path_to_wheel_installation_directory }}" +``` + +Using `--target` is recommended, else the wheel will be installed in Python's home directory which may not have write permissions. + The environment variables: ``` PYZ: "path_to_python_installation_on_zos_target" ZOAU: "path_to_zoau_installation_on_zos_target" +# ZOAU_PYTHONPATH: "path_to_zoau_wheel_installation_directory" ansible_python_interpreter: "{{ PYZ }}/bin/python3" environment_vars: _BPXK_AUTOCVT: "ON" ZOAU_HOME: "{{ ZOAU }}" - PYTHONPATH: "{{ ZOAU }}/lib" + PYTHONPATH: "{{ ZOAU_PYTHONPATH }}" LIBPATH: "{{ ZOAU }}/lib:{{ PYZ }}/lib:/lib:/usr/lib:." 
PATH: "{{ ZOAU }}/bin:{{ PYZ }}/bin:/bin:/var/bin" _CEE_RUNOPTS: "FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)" From 6005f4c903ee451268bfcc25beaa87086a1868ac Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 15 May 2024 23:35:30 -0700 Subject: [PATCH 394/413] Update support matrix Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 7b0259c4a..e92023358 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -4,16 +4,20 @@ Releases and maintenance This table describes the collections release dates, dependency versions and End of Life dates (EOL). -The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally each quarter -a beta is released followed by a GA version. We can extend this cycle to properly implement and test larger -changes before a new release is made available. +The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally, each quarter +a beta is released followed by a GA version. Occasionally, the cycle may be extended to properly implement and +test larger changes before a new release is made available. -These are the component versions available when the collection is made generally available. The underlying -component version is likely to change as they reach EOL, thus components must be a version that is +These are the component versions available when the collection was made generally available. The underlying +component version is likely to change as it reaches EOL, thus components must be a version that is currently supported. 
For example, if a collection releases with a minimum version of ``ansible-core`` of 2.11.0, and later this -enters into EOL, then a newer version of ansible-core must be used. +enters into EOL, then a newer and supported version of ansible-core must be used. + +End of Life (EOL) for this collection is generally a 2-year cycle unless a dependency reaches EOL prior to the 2 years. +For example, if a collection releases and its dependency reaches EOL 1 year later, then the collection will EOL at the +same time as the dependency, 1 year later. Support Matrix ============== @@ -25,22 +29,22 @@ Support Matrix | | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | | | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or later | +---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.9.x | 05 Feb 2024 | >=2.14.x | >=7.0.x | >=2.3 | 05 Feb 2026 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| 1.9.x | 05 Feb 2024 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | | | | | | | | |- z/OS shell | | | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | | | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to version 1.3.0| +---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.8.x | 13 Dec 2023 | >=2.14.x | >=7.0.x | >=2.3 | 13 Dec 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| 1.8.x | 13 Dec 2023 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | | | | | | | | |- z/OS shell | | | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | | | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to version 1.3.0| 
+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.7.x | 10 Oct 2023 | >=2.14.x | >=7.0.x | >=2.3 | 10 Oct 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| 1.7.x | 10 Oct 2023 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | | | | | | | | |- z/OS shell | | | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | | | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to version 1.3.0| +---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.6.x | 28 June 2023 | >=2.14.x | >=7.0.x | >=2.3 | 28 June 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| 1.6.x | 28 June 2023 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | | | | | | | | |- z/OS shell | | | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | | | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to version 1.3.0| From dae7c371c4919a1226cf804f71d57b8e1830e7cc Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 00:10:10 -0700 Subject: [PATCH 395/413] update support matrix with new information Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index e92023358..5d460e4ef 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -21,36 +21,36 @@ same time as the dependency, 1 year later. 
Support Matrix ============== -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node | Managed Node | -+=========+===============+==============+=========+=======+===============+====================+============================================================================+ -| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or later | -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.9.x | 05 Feb 2024 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to version 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.8.x | 13 Dec 2023 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to version 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.7.x | 10 Oct 2023 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | 
-| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to version 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.6.x | 28 June 2023 | >=2.14.x | >=7.0.x | >=2.3 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to version 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ -| 1.5.x | 25 April 2023 | >=2.14.x | >=7.0.x | >=2.3 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to version 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+--------------------+----------------------------------------------------------------------------+ \ No newline at end of file ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | ++=========+===============+==============+=========+=======+===============+=====================+====================================================================+ +| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for 
Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or later | ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.8.x | 13 Dec 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.7.x | 10 Oct 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.6.x | 28 June 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| 
++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ \ No newline at end of file From 6916422265c9fda201588b4dc2b013b2d434cca5 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 00:19:01 -0700 Subject: [PATCH 396/413] update support matrix with new information Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 5d460e4ef..7979ed389 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -24,10 +24,10 @@ Support Matrix +---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ | Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | +=========+===============+==============+=========+=======+===============+=====================+====================================================================+ -| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z 
Open Automation Utilities 1.3.0 or later | +| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.3.0 or later | +---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ | 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | | | | | | | | |- z/OS shell | From 10d3d642b621cef41fe445a396a88a18a55e5da8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 00:20:33 -0700 Subject: [PATCH 397/413] update support matrix with new information Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 7979ed389..8a919a7f5 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -21,36 +21,36 @@ same time as the dependency, 1 year later. 
Support Matrix ============== -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | -+=========+===============+==============+=========+=======+===============+=====================+====================================================================+ -| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.3.0 or later | -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.8.x | 13 Dec 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.7.x | 10 Oct 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | 
| | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.6.x | 28 June 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ \ No newline at end of file ++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ +| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | ++=========+===============+==============+=========+=======+===============+=====================+=====================================================================+ +| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or 
later | ++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ +| 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.2.5 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ +| 1.8.x | 13 Dec 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ +| 1.7.x | 10 Oct 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.2.3 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ +| 1.6.x | 28 June 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| 
++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ +| 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | +| | | | | | | | - z/OS shell | +| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | | - IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ \ No newline at end of file From 4613ce92d3c267aaf6354ca6dd9d162c4ac05efb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 00:21:18 -0700 Subject: [PATCH 398/413] update support matrix with new information Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 8a919a7f5..7fed93244 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -21,36 +21,36 @@ same time as the dependency, 1 year later. 
Support Matrix ============== -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ -| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | -+=========+===============+==============+=========+=======+===============+=====================+=====================================================================+ -| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.3.0 or later | -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ -| 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.2.5 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ -| 1.8.x | 13 Dec 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ -| 1.7.x | 10 Oct 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - 
z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.2.3 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ -| 1.6.x | 28 June 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ -| 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 | - z/OS V2R4 - V2R5 | -| | | | | | | | - z/OS shell | -| | | | | | | | - IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | | - IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+---------------------------------------------------------------------+ \ No newline at end of file ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | ++=========+===============+==============+=========+=======+===============+=====================+====================================================================+ +| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open 
Automation Utilities 1.3.0 or later | ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.8.x | 13 Dec 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.7.x | 10 Oct 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.6.x | 28 June 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| 
++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ +| 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | +| | | | | | | |- z/OS shell | +| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | +| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| ++---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ \ No newline at end of file From 9d7b12efde9605d65b58a01e973400719e363e58 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 08:26:50 -0700 Subject: [PATCH 399/413] Update 1.5.0 zoau dependency version Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 7fed93244..47a570d25 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -52,5 +52,5 @@ Support Matrix | 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | | | | | | | | |- z/OS shell | | | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| +| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| +---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ \ No newline at end of file From c8ab45af6df40586fa278f85beaca8de29450b09 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 
2024 14:20:40 -0700 Subject: [PATCH 400/413] Update readme to better explain wheels Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 90df543a0..4cfe70a40 100644 --- a/README.md +++ b/README.md @@ -58,20 +58,18 @@ As part of the installation, the collection [requirements](#Requirements) must b If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed [here](https://github.com/ansible-collections/ibm_zos_core/discussions/657). -To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method). If the wheel is installed using the `--target` option, it will install the package into the specified directory, if the wheel is installed using the `--user` option, it will install the package into the user directory which will then need to have `PYTHONPATH` configured to where the packages is installed, e.g; `PYTHONPATH: /u/user`. +To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method). -If the ZOAU Python wheel package is installed using either `--target` or `--user`, uncomment the following line in the environment vars section. -``` -ZOAU_PYTHONPATH: "{{ path_to_wheel_installation_directory }}" -``` +If the wheel is installed using the `--target` option, it will install the package into the specified target directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /usr/zoau/wheels`. Using `--target` is recommended, else the wheel will be installed in Python's home directory which may not have write permissions or persist +after an update. 
-Using `--target` is recommended, else the wheel will be installed in Python's home directory which may not have write permissions. +If the wheel is installed using the `--user` option, it will install the package into the user directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /u/user` -The environment variables: +Environment variables: ``` PYZ: "path_to_python_installation_on_zos_target" ZOAU: "path_to_zoau_installation_on_zos_target" -# ZOAU_PYTHONPATH: "path_to_zoau_wheel_installation_directory" +ZOAU_PYTHONPATH: "path_to_zoau_wheel_installation_directory" ansible_python_interpreter: "{{ PYZ }}/bin/python3" From 8818fa4f33ceb653e88855b92395c738c8a5970c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 14:46:44 -0700 Subject: [PATCH 401/413] Update release notes with support matrix Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 70 ++++++++++++++++------------------- 1 file changed, 32 insertions(+), 38 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 751c8c337..df06d149c 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,5 +1,5 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2024 . +.. © Copyright IBM Corporation 2020, 2024 . .. ........................................................................... ======== @@ -15,6 +15,8 @@ Major Changes - Starting with IBM Ansible z/OS core version 1.10.x, ZOAU version 1.3.0 will be required. - Starting with IBM Ansible z/OS core version 1.10.x, all module options are case sensitive, review the porting guide for specifics. +- The README has been updated with a new template. +- The *Reference* section has been renamed to *Requirements" and now includes a support matrix.
Minor Changes ------------- @@ -34,7 +36,7 @@ Bugfixes - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. -- zos_mvs_raw - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1320). +- ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. Porting Guide ------------- @@ -95,13 +97,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ -* Supported by IBM `Z Open Automation Utilities 1.3.0`_ or later. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.9.0 ============= @@ -222,13 +222,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ -* Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. 
Version 1.8.0 ============= @@ -308,13 +306,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.7.0 ============= @@ -375,13 +371,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.6.0 ============= @@ -438,13 +432,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. 
Version 1.5.0 ============= @@ -554,13 +546,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. .. ............................................................................. .. Global Links @@ -583,6 +573,8 @@ Reference https://www.ibm.com/docs/en/python-zos/3.11 .. _3.12: https://www.ibm.com/docs/en/python-zos/3.12 +.. _Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau/latest .. _Z Open Automation Utilities 1.1.0: https://www.ibm.com/docs/en/zoau/1.1.x .. _Z Open Automation Utilities 1.1.1: @@ -609,6 +601,8 @@ Reference https://www.ibm.com/docs/en/zos .. _FAQs: https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html +.. _z/OS core support matrix: + https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html .. ............................................................................. .. 
Playbook Links From cd6e84a4918755910f2da1161e9980c428a1d516 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 14:47:27 -0700 Subject: [PATCH 402/413] Update support matrix to remove versions Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 114 ++++++++++++------ 1 file changed, 75 insertions(+), 39 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 47a570d25..8ca54ab5c 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -1,3 +1,7 @@ +.. ........................................................................... +.. © Copyright IBM Corporation 2024 . +.. ........................................................................... + ======================== Releases and maintenance ======================== @@ -8,49 +12,81 @@ The ``ibm_zos_core`` collection is developed and released on a flexible release a beta is released followed by a GA version. Occasionally, the cycle may be extended to properly implement and test larger changes before a new release is made available. -These are the component versions available when the collection was made generally available. The underlying +End of Life for this collection is generally a 2-year cycle unless a dependency reaches EOL prior to the 2 years. +For example, if a collection has released and its dependency reaches EOL 1 year later, then the collection will EOL +at the same time as the dependency, 1 year later. + +These are the component versions available when the collection was made generally available (GA). The underlying component version is likely to change as it reaches EOL, thus components must be a version that is currently supported. 
-For example, if a collection releases with a minimum version of ``ansible-core`` of 2.11.0, and later this -enters into EOL, then a newer and supported version of ansible-core must be used. +For example, if a collection releases with a minimum version of ``ansible-core`` 2.14.0 (Ansible 7.0) and later this +enters into EOL, then a newer supported version of ``ansible-core`` (Ansible) must be selected. When choosing a newer +``ansible-core`` (Ansible) version, review the `ansible-core support matrix`_ to select the appropriate dependencies. +This is important to note, different releases of ``ansible-core`` can require newer controller and managed node +dependencies such as is the case with Python. -End of Life (EOL) for this collection is generally a 2-year cycle unless a dependency reaches EOL prior to the 2 years. -For example, if a collection releases and its dependency reaches EOL 1 year later, then the collection will EOL at the -same time as the dependency, 1 year later. +If the controller is Ansible Automation Platform (AAP), review the `Red Hat Ansible Automation Platform Life Cycle`_ +to select a supported AAP version. + +For IBM product lifecycle information, you can search for products using a product name, version or ID. For example, +to view IBM's **Open Enterprise SDK for Python** lifecycle, search on product ID `5655-PYT`_, and for **Z Open Automation Utilities**, +search on product ID `5698-PA1`_. 
Support Matrix ============== -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| Version | GA Release | ansible-core | Ansible | AAP | End of Life | Control Node Python | Managed Node Dependencies | -+=========+===============+==============+=========+=======+===============+=====================+====================================================================+ -| 1.10.x | In preview | >=2.15.x | >=8.0.x | >=2.4 | TBD | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.3.0 or later | -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.9.x | 05 Feb 2024 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.5 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.8.x | 13 Dec 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.4 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.7.x | 10 Oct 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | 
| | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.3 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.6.x | 28 June 2023 | >=2.15.x | >=8.0.x | >=2.4 | 30 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ -| 1.5.x | 25 April 2023 | >=2.15.x | >=8.0.x | >=2.4 | 25 April 2025 | Python 3.10 - 3.11 |- z/OS V2R4 - V2R5 | -| | | | | | | |- z/OS shell | -| | | | | | | |- IBM Open Enterprise SDK for Python 3.10 - 3.11 | -| | | | | | | |- IBM Z Open Automation Utilities 1.2.2 or later, but prior to 1.3.0| -+---------+---------------+--------------+---------+-------+---------------+---------------------+--------------------------------------------------------------------+ \ No newline at end of file ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ +| Version | Controller | Managed Node | GA | End of Life | ++=========+=========================+===================================================+===============+===============+ +| 1.10.x | ansible-core >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | +| | Ansible >=8.0.x |- `z/OS shell`_ | | | +| | AAP >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ +| 1.9.x | ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx 
| 05 Feb 2024 | 30 April 2025 | +| | Ansible >=7.0.x |- `z/OS shell`_ | | | +| | AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | | ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ +| 1.8.x | ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 | +| | Ansible >=7.0.x |- `z/OS shell`_ | | | +| | AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.4 - 1.2.x | | | ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ +| 1.7.x | ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 | +| | Ansible >=7.0.x |- `z/OS shell`_ | | | +| | AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | | ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ +| 1.6.x | ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 | +| | Ansible >=2.9.x |- `z/OS shell`_ | | | +| | AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ +| 1.5.x | ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 | +| | Ansible >=2.9.x |- `z/OS shell`_ | | | +| | AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+-------------------------+---------------------------------------------------+---------------+---------------+ + +.. ............................................................................. +.. Global Links +.. 
............................................................................. +.. _ansible-core support matrix: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix +.. _Red Hat Ansible Automation Platform Life Cycle: + https://access.redhat.com/support/policy/updates/ansible-automation-platform +.. _Automation Hub: + https://www.ansible.com/products/automation-hub +.. _Open Enterprise SDK for Python: + https://www.ibm.com/products/open-enterprise-python-zos +.. _Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau/latest +.. _z/OS shell: + https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm +.. _z/OS: + https://www.ibm.com/docs/en/zos +.. _5655-PYT: + https://www.ibm.com/support/pages/lifecycle/search?q=5655-PYT +.. _5698-PA1: + https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1 \ No newline at end of file From 40e0a9422551bd6142ef2b11e99ec27fab345b28 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 14:49:48 -0700 Subject: [PATCH 403/413] Updat release notes with support matrix Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index df06d149c..a75f1b522 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -16,7 +16,7 @@ Major Changes - Starting with IBM Ansible z/OS core version 1.10.x, all module options are case sensitive, review the porting guide for specifics. - The README has been updated with a new template. -- The *Reference* section has been renamed to *Requirements" and now includes a support matrix. +- The **Reference** section has been renamed to **Requirements** and now includes a support matrix. 
Minor Changes ------------- From ff7c6d9b7caad99db8ca7e751c6b5c46c09f1b8a Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 14:52:58 -0700 Subject: [PATCH 404/413] Update support matrix with bullets Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 8ca54ab5c..ef11d955b 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -35,39 +35,39 @@ search on product ID `5698-PA1`_. Support Matrix ============== -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ -| Version | Controller | Managed Node | GA | End of Life | -+=========+=========================+===================================================+===============+===============+ -| 1.10.x | ansible-core >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | -| | Ansible >=8.0.x |- `z/OS shell`_ | | | -| | AAP >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ -| 1.9.x | ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 05 Feb 2024 | 30 April 2025 | -| | Ansible >=7.0.x |- `z/OS shell`_ | | | -| | AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | | -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ -| 1.8.x | ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 | -| | Ansible >=7.0.x |- `z/OS shell`_ | | | -| | AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open 
Automation Utilities`_ 1.2.4 - 1.2.x | | | -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ -| 1.7.x | ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 | -| | Ansible >=7.0.x |- `z/OS shell`_ | | | -| | AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | | -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ -| 1.6.x | ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 | -| | Ansible >=2.9.x |- `z/OS shell`_ | | | -| | AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ -| 1.5.x | ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 | -| | Ansible >=2.9.x |- `z/OS shell`_ | | | -| | AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | -+---------+-------------------------+---------------------------------------------------+---------------+---------------+ ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ +| Version | Controller | Managed Node | GA | End of Life | ++=========+==========================+===================================================+===============+===============+ +| 1.10.x |- ansible-core >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | +| |- Ansible >=8.0.x |- `z/OS shell`_ | | | +| |- AAP >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ +| 1.9.x |- 
ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 05 Feb 2024 | 30 April 2025 | +| |- Ansible >=7.0.x |- `z/OS shell`_ | | | +| |- AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | | ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ +| 1.8.x |- ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 | +| |- Ansible >=7.0.x |- `z/OS shell`_ | | | +| |- AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.4 - 1.2.x | | | ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ +| 1.7.x |- ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 | +| |- Ansible >=7.0.x |- `z/OS shell`_ | | | +| |- AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | | ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ +| 1.6.x |- ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 | +| |- Ansible >=2.9.x |- `z/OS shell`_ | | | +| |- AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ +| 1.5.x |- ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 | +| |- Ansible >=2.9.x |- `z/OS shell`_ | | | +| |- AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+--------------------------+---------------------------------------------------+---------------+---------------+ .. ............................................................................. .. 
Global Links From d28047a00d9338d864bd4dd43a7fccb3b35b8526 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 14:59:57 -0700 Subject: [PATCH 405/413] Update support matrix with bullets Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 74 ++++++++++--------- 1 file changed, 40 insertions(+), 34 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index ef11d955b..2280ae746 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -35,45 +35,47 @@ search on product ID `5698-PA1`_. Support Matrix ============== -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ -| Version | Controller | Managed Node | GA | End of Life | -+=========+==========================+===================================================+===============+===============+ -| 1.10.x |- ansible-core >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | -| |- Ansible >=8.0.x |- `z/OS shell`_ | | | -| |- AAP >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ -| 1.9.x |- ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 05 Feb 2024 | 30 April 2025 | -| |- Ansible >=7.0.x |- `z/OS shell`_ | | | -| |- AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | | -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ -| 1.8.x |- ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 | -| |- Ansible >=7.0.x |- `z/OS shell`_ | | | -| |- AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open 
Automation Utilities`_ 1.2.4 - 1.2.x | | | -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ -| 1.7.x |- ansible-core >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 | -| |- Ansible >=7.0.x |- `z/OS shell`_ | | | -| |- AAP >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | | -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ -| 1.6.x |- ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 | -| |- Ansible >=2.9.x |- `z/OS shell`_ | | | -| |- AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ -| 1.5.x |- ansible-core >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 | -| |- Ansible >=2.9.x |- `z/OS shell`_ | | | -| |- AAP >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | -| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | -+---------+--------------------------+---------------------------------------------------+---------------+---------------+ ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| Version | Controller | Managed Node | GA | End of Life | ++=========+============================+===================================================+===============+===============+ +| 1.10.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | +| |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | 
++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.9.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 05 Feb 2024 | 30 April 2025 | +| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.8.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 | +| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.4 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.7.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 | +| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.6.x |- `ansible-core`_ >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 | +| |- `Ansible`_ >=2.9.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.5.x |- `ansible-core`_ >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 | +| |- `Ansible`_ >=2.9.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | 
++---------+----------------------------+---------------------------------------------------+---------------+---------------+ .. ............................................................................. .. Global Links .. ............................................................................. .. _ansible-core support matrix: https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix +.. _AAP: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix .. _Red Hat Ansible Automation Platform Life Cycle: https://access.redhat.com/support/policy/updates/ansible-automation-platform .. _Automation Hub: @@ -89,4 +91,8 @@ Support Matrix .. _5655-PYT: https://www.ibm.com/support/pages/lifecycle/search?q=5655-PYT .. _5698-PA1: - https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1 \ No newline at end of file + https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1 +.. _ansible-core: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix +.. _Ansible: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix \ No newline at end of file From ca1613fdb07a418d6ad505a1f27b3b9a49030f78 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 15:01:21 -0700 Subject: [PATCH 406/413] Update support matrix with bullets Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 2280ae746..acb0e6559 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -75,7 +75,7 @@ Support Matrix .. 
_ansible-core support matrix: https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix .. _AAP: - https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix + https://access.redhat.com/support/policy/updates/ansible-automation-platform .. _Red Hat Ansible Automation Platform Life Cycle: https://access.redhat.com/support/policy/updates/ansible-automation-platform .. _Automation Hub: From 58a9d815dfbb64d367502201f7dd9cdcbd4cb0a6 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 16:07:17 -0700 Subject: [PATCH 407/413] Fixed release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index a75f1b522..7254f3377 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -33,8 +33,8 @@ Bugfixes - ``zos_find`` - Option **size** failed if a PDS/E matched the pattern, now filtering on utilized size for a PDS/E is supported. - ``zos_job_submit`` - - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. - - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. + - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. + - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. - ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. 
From 0927a342f0fda51e3c8da220bca77f762a95b0b2 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 16:08:31 -0700 Subject: [PATCH 408/413] Fixed release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 7254f3377..e4070d8c0 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -31,6 +31,7 @@ Bugfixes - ``zos_apf`` - Option **list** previously only returned one data set, now it returns a list of retrieved data sets. - ``zos_blockinfile`` - Option **block** when containing double double quotation marks results in a task failure (failed=True); now the module handles this case to avoid failure. - ``zos_find`` - Option **size** failed if a PDS/E matched the pattern, now filtering on utilized size for a PDS/E is supported. + - ``zos_job_submit`` - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. From 1dbe5baddf116bfce8bd9288804d0dc18519b61b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 16:35:18 -0700 Subject: [PATCH 409/413] Fixed release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index e4070d8c0..84d8d21d2 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -34,8 +34,8 @@ Bugfixes - ``zos_job_submit`` - - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. - - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. 
+ - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. + - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. - ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. From 29d695be62e25f5b93912ec8c2fb6a645c58163c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 16:43:23 -0700 Subject: [PATCH 410/413] Fixed release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 84d8d21d2..2a4dafe76 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -22,7 +22,7 @@ Minor Changes ------------- - ``zos_apf`` - Enhanced error messages when an exception is caught. -- ``zos_backup_restore`` - Add tmp_hlq option to the user interface to override the default high level qualifier (HLQ) for temporary and backup. +- ``zos_backup_restore`` - Added option **tmp_hlq** to the user module to override the default high level qualifier (HLQ) for temporary and backup. - ``zos_copy`` - Documented module options `group` and `owner`. 
Bugfixes From 16cf5dcee86bee095684cba1d872135d39413685 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 16:47:22 -0700 Subject: [PATCH 411/413] Fixed release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 2a4dafe76..87d707181 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -22,7 +22,7 @@ Minor Changes ------------- - ``zos_apf`` - Enhanced error messages when an exception is caught. -- ``zos_backup_restore`` - Added option **tmp_hlq** to the user module to override the default high level qualifier (HLQ) for temporary and backup. +- ``zos_backup_restore`` - Added option **tmp_hlq** to the user module to override the default high level qualifier (HLQ) for temporary and backup data sets. - ``zos_copy`` - Documented module options `group` and `owner`. Bugfixes From 6934ce032897b8a57a89486d4da8307496fc3729 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 21:43:08 -0700 Subject: [PATCH 412/413] Module doc generated updates Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_mvs_raw.rst | 29 --------------------------- docs/source/modules/zos_unarchive.rst | 2 +- 2 files changed, 1 insertion(+), 30 deletions(-) diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 3748f5ad9..d98c9493b 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -1748,35 +1748,6 @@ Examples VOLUMES(222222) - UNIQUE) - - name: Define a cluster using a literal block style indicator - with a 2 space indentation. 
- zos_mvs_raw: - program_name: idcams - auth: yes - dds: - - dd_output: - dd_name: sysprint - return_content: - type: text - - dd_input: - dd_name: sysin - content: |2 - DEFINE CLUSTER - - (NAME(ANSIBLE.TEST.VSAM) - - CYL(10 10) - - FREESPACE(20 20) - - INDEXED - - KEYS(32 0) - - NOERASE - - NONSPANNED - - NOREUSE - - SHAREOPTIONS(3 3) - - SPEED - - UNORDERED - - RECORDSIZE(4086 32600) - - VOLUMES(222222) - - UNIQUE) - diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index f450e3414..f2d7aba8b 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -1,4 +1,4 @@ -g + :github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_unarchive.py .. _zos_unarchive_module: From 19a0d89f0eb391029eb1c4a9d5bfa7021f37160c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 May 2024 22:14:54 -0700 Subject: [PATCH 413/413] Remove unused import Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/action/zos_copy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index d76a7032d..90d49874a 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -32,7 +32,7 @@ is_member ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template