diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml
index e03266e7b..dba9db3b8 100644
--- a/.github/ISSUE_TEMPLATE/bug_issue.yml
+++ b/.github/ISSUE_TEMPLATE/bug_issue.yml
@@ -1,7 +1,7 @@
name: Report a bug
description: Request that a bug be reviewed. Complete all required fields.
-title: "[Bug]
"
-labels: [Bug]
+title: "[Bug] Enter description"
+labels: ["Bug", "Needs Triage" ]
assignees:
- IBMAnsibleHelper
body:
@@ -13,63 +13,71 @@ body:
options:
- label: There are no existing issues.
required: true
- - type: checkboxes
- id: valid-dependencies
+ - type: textarea
+ id: issue-description
+ attributes:
+ label: Bug description
+ description: Describe the bug you are experiencing.
+ placeholder: |
+        Verbosity is encouraged; the more you share, the better we can understand.
+ 1. Include the steps to reproduce
+ 2. Include playbook if applicable
+        3. Include screen captures if applicable
+ 4. Include expected vs actual results if applicable
+ validations:
+ required: true
+ - type: dropdown
+ id: collection-version
attributes:
- label: Are the dependencies a supported version?
- description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
+ label: IBM z/OS Ansible core Version
+      description: Which version of the z/OS Ansible core collection are you using? If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version).
+ multiple: false
options:
- - label: The dependencies are supported.
- required: true
+ - v1.12.0
+ - v1.12.0-beta.1
+ - v1.11.0
+ - v1.11.0-beta.1
+ - v1.10.0
+ - v1.10.0-beta.1
+ - v1.9.0 (default)
+ - v1.8.0
+ - v1.7.0
+ - v1.6.0
+ - v1.5.0
+ default: 6
+ validations:
+ required: true
- type: dropdown
id: zoau-version
attributes:
label: IBM Z Open Automation Utilities
- description: Which version of ZOAU are you using?
+      description: Which version of ZOAU are you using? Please review the supported dependencies in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
multiple: false
options:
- - v1.2.5
+ - v1.3.4
+ - v1.3.3
+ - v1.3.2
+ - v1.3.1
+ - v1.3.0
+ - v1.2.5 (default)
- v1.2.4
- v1.2.3
- v1.2.2
- - v1.2.1
- - v1.2.0
- - v1.1.1
- - v1.0.3
+ default: 5
validations:
required: true
- type: dropdown
id: python-version
attributes:
label: IBM Enterprise Python
- description: Which version of IBM Enterprise Python are you using?
+      description: Which version of IBM Enterprise Python are you using? Please review the supported dependencies in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
multiple: false
options:
+ - v3.13.x
- v3.12.x
- - v3.11.x
+ - v3.11.x (default)
- v3.10.x
- - v3.9.x
- - v3.8.x
- validations:
- required: true
- - type: dropdown
- id: collection-version
- attributes:
- label: IBM z/OS Ansible core Version
- description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version).
- multiple: false
- options:
- - v1.8.0-beta.1
- - v1.7.0
- - v1.7.0-beta.1
- - v1.6.0
- - v1.6.0-beta.1
- - v1.5.0
- - v1.4.1
- - v1.3.6
- - v1.2.1
- - v1.1.0
- - v1.0.0
+ default: 2
validations:
required: true
- type: dropdown
@@ -79,27 +87,24 @@ body:
description: What is the version of Ansible on the controller (`ansible --version`)?
multiple: false
options:
- - latest
- - v2.16.x
+ - v2.17.x
+ - v2.16.x (default)
- v2.15.x
- v2.14.x
- - v2.13.x
- - v2.12.x
- - v2.11.x
- - v2.9.x
+ default: 1
validations:
required: true
- type: dropdown
id: zos-version
attributes:
label: z/OS version
- description: What is the version of z/OS on the managed node?
+      description: What is the version of z/OS on the managed node? Please review the supported dependencies in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
multiple: false
options:
- - v3.1
- - v2.5
+ - v3.1 (unsupported)
+ - v2.5 (default)
- v2.4
- - v2.3
+ default: 1
validations:
required: false
- type: dropdown
@@ -110,6 +115,7 @@ body:
multiple: true
options:
- zos_apf
+ - zos_archive
- zos_backup_restore
- zos_blockinfile
- zos_copy
@@ -129,21 +135,10 @@ body:
- zos_ping
- zos_script
- zos_tso_command
+ - zos_unarchive
+ - zos_volume_init
validations:
required: false
- - type: textarea
- id: issue-description
- attributes:
- label: Bug description
- description: Describe the bug you are experiencing.
- placeholder: |
- Verbosity is encouraged, the more you share the better for us to understand.
- 1. Include the steps to reproduce
- 2. Include playbook if applicable
- 3. Include screen captures of applicable
- 4. Include expected vs actual results if applicable
- validations:
- required: true
- type: textarea
id: issue-output
attributes:
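In GitHub issue forms, the `default` attribute added to these dropdowns is a zero-based index into `options`; for example, `default: 6` above preselects the seventh entry, `v1.9.0 (default)`. A minimal sketch of the pattern (the id and version list here are illustrative, not from the template):

```yaml
- type: dropdown
  id: collection-version
  attributes:
    label: IBM z/OS Ansible core Version
    options:
      - v1.10.0
      - v1.9.0 (default)
      - v1.8.0
    default: 1    # zero-based index: preselects "v1.9.0 (default)"
  validations:
    required: true
```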
diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml
index f601ce1e1..0dbf462bd 100644
--- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml
+++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml
@@ -23,70 +23,70 @@ body:
required: false
- label: No, support and service is involved.
required: false
- - type: checkboxes
- id: valid-dependencies
+ - type: textarea
+ id: issue-description
attributes:
- label: Are the dependencies a supported?
- description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
+ label: Collaboration description
+ description: Describe the collaboration issue.
+ placeholder: |
+ For example
+ 1. Working with IBM Enterprise Python to resolve issue xyz.
+ 2. Working with z/OS application team DFSMS to resolve xyz.
+ 3. Assisting IBM support to resolve an ibm_zos_copy issue.
+ validations:
+ required: true
+ - type: dropdown
+ id: collection-version
+ attributes:
+ label: IBM z/OS Ansible core Version
+      description: Which version of the z/OS Ansible core collection are you using? If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version).
+ multiple: false
options:
- - label: Yes, the dependencies are supported.
- required: false
- - label: Not applicable to this collaboration.
- required: false
+ - v1.12.0
+ - v1.12.0-beta.1
+ - v1.11.0
+ - v1.11.0-beta.1
+ - v1.10.0
+ - v1.10.0-beta.1
+ - v1.9.0 (default)
+ - v1.8.0
+ - v1.7.0
+ - v1.6.0
+ - v1.5.0
+ default: 6
+ validations:
+ required: false
- type: dropdown
id: zoau-version
attributes:
label: IBM Z Open Automation Utilities
- description: Which version of ZOAU are you using?
+      description: Which version of ZOAU are you using? Please review the supported dependencies in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
multiple: false
options:
- - v1.2.5
+ - v1.3.4
+ - v1.3.3
+ - v1.3.2
+ - v1.3.1
+ - v1.3.0
+ - v1.2.5 (default)
- v1.2.4
- v1.2.3
- v1.2.2
- - v1.2.1
- - v1.2.0
- - v1.1.1
- - v1.0.3
+ default: 5
validations:
required: false
- type: dropdown
id: python-version
attributes:
label: IBM Enterprise Python
- description: Which version of IBM Enterprise Python are you using?
- multiple: true
+      description: Which version of IBM Enterprise Python are you using? Please review the supported dependencies in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
+ multiple: false
options:
- - v3.14.x
- v3.13.x
- v3.12.x
- - v3.11.x
+ - v3.11.x (default)
- v3.10.x
- - v3.9.x
- - v3.8.x
- validations:
- required: false
- - type: dropdown
- id: collection-version
- attributes:
- label: IBM z/OS Ansible core Version
- description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version).
- multiple: false
- options:
- - v1.9.0
- - v1.9.0-beta.1
- - v1.8.0
- - v1.8.0-beta.1
- - v1.7.0
- - v1.7.0-beta.1
- - v1.6.0
- - v1.6.0-beta.1
- - v1.5.0
- - v1.4.1
- - v1.3.6
- - v1.2.1
- - v1.1.0
- - v1.0.0
+ default: 2
validations:
required: false
- type: dropdown
@@ -96,27 +96,24 @@ body:
description: What is the version of Ansible on the controller (`ansible --version`)?
multiple: false
options:
- - latest
- - v2.16.x
+ - v2.17.x
+ - v2.16.x (default)
- v2.15.x
- v2.14.x
- - v2.13.x
- - v2.12.x
- - v2.11.x
- - v2.9.x
+ default: 1
validations:
required: false
- type: dropdown
id: zos-version
attributes:
label: z/OS version
- description: What is the version of z/OS on the managed node?
+      description: What is the version of z/OS on the managed node? Please review the supported dependencies in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
multiple: false
options:
- - v3.1
- - v2.5
+ - v3.1 (unsupported)
+ - v2.5 (default)
- v2.4
- - v2.3
+ default: 1
validations:
required: false
- type: dropdown
@@ -127,6 +124,7 @@ body:
multiple: true
options:
- zos_apf
+ - zos_archive
- zos_backup_restore
- zos_blockinfile
- zos_copy
@@ -146,23 +144,7 @@ body:
- zos_ping
- zos_script
- zos_tso_command
+ - zos_unarchive
+ - zos_volume_init
validations:
- required: false
- - type: textarea
- id: issue-description
- attributes:
- label: Collaboration description
- description: Describe the collaboration issue.
- placeholder: |
- For example
- 1. Working with IBM Enterprise Python to resolve issue xyz.
- 2. Working with z/OS application team DFSMS to resolve xyz.
- 3. Assisting IBM support to resolve an ibm_zos_copy issue.
- validations:
- required: true
-
-
-
-
-
-
+ required: false
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml
index 38a8f1818..b7de4a490 100644
--- a/.github/ISSUE_TEMPLATE/doc_issue.yml
+++ b/.github/ISSUE_TEMPLATE/doc_issue.yml
@@ -31,33 +31,32 @@ body:
id: collection-version
attributes:
label: IBM z/OS Ansible core Version
- description: Which version of z/OS Ansible core collection are you reporting a documentation bug. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version).
+      description: Which version of the z/OS Ansible core collection are you using? If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version).
multiple: false
options:
- - v1.9.0
- - v1.9.0-beta.1
+ - v1.12.0
+ - v1.12.0-beta.1
+ - v1.11.0
+ - v1.11.0-beta.1
+ - v1.10.0
+ - v1.10.0-beta.1
+ - v1.9.0 (default)
- v1.8.0
- - v1.8.0-beta.1
- v1.7.0
- - v1.7.0-beta.1
- v1.6.0
- - v1.6.0-beta.1
- v1.5.0
- - v1.4.1
- - v1.3.6
- - v1.2.1
- - v1.1.0
- - v1.0.0
+ default: 6
validations:
required: false
- type: dropdown
id: modules
attributes:
label: Ansible module
- description: Select which modules are being reported in this doc issue. You can select more than one.
+      description: Select which modules are being reported in this doc issue. You can select more than one.
multiple: true
options:
- zos_apf
+ - zos_archive
- zos_backup_restore
- zos_blockinfile
- zos_copy
@@ -77,5 +76,7 @@ body:
- zos_ping
- zos_script
- zos_tso_command
+ - zos_unarchive
+ - zos_volume_init
validations:
required: false
diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml
index d520148dc..c9584acfd 100644
--- a/.github/ISSUE_TEMPLATE/enabler_issue.yml
+++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml
@@ -15,14 +15,23 @@ body:
options:
- label: There are no existing issues.
required: true
+ - type: textarea
+ id: issue-description
+ attributes:
+ label: Enabler description
+ description: Describe the task.
+      placeholder: Verbosity is encouraged; the more you share, the better we can understand.
+ validations:
+ required: true
- type: dropdown
id: modules
attributes:
label: Ansible module
- description: Select which modules are being reported for this task. You can select more than one.
+      description: Select which modules are being reported for this task. You can select more than one.
multiple: true
options:
- zos_apf
+ - zos_archive
- zos_backup_restore
- zos_blockinfile
- zos_copy
@@ -42,13 +51,8 @@ body:
- zos_ping
- zos_script
- zos_tso_command
+ - zos_unarchive
+ - zos_volume_init
validations:
required: false
- - type: textarea
- id: issue-description
- attributes:
- label: Enabler description
- description: Describe the task, this is the equivalent of a agile story.
- placeholder: Verbosity is encouraged, the more you share the better for us to understand.
- validations:
- required: true
+
diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml
index f190ee70c..98adbf65b 100644
--- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml
+++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml
@@ -13,15 +13,23 @@ body:
options:
- label: There are no existing issues.
required: true
+ - type: textarea
+ id: issue-description
+ attributes:
+ label: Enhancement or feature description
+ description: Describe the enhancement or feature you are requesting.
+      placeholder: Verbosity is encouraged; the more you share, the better we can understand.
+ validations:
+ required: true
- type: dropdown
id: modules
attributes:
label: Ansible module
- description: Select which modules are being reported in this enhancement or feature. You can select more than one.
+      description: Select which modules are being reported in this enhancement or feature. You can select more than one.
multiple: true
options:
- - zos_archive
- zos_apf
+ - zos_archive
- zos_backup_restore
- zos_blockinfile
- zos_copy
@@ -42,14 +50,7 @@ body:
- zos_script
- zos_tso_command
- zos_unarchive
- validations:
- required: true
- - type: textarea
- id: issue-description
- attributes:
- label: Enhancement or feature description
- description: Describe the enhancement or feature you are requesting.
- placeholder: Verbosity is encouraged, the more you share the better for us to understand.
+ - zos_volume_init
validations:
required: true
diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml
index a7e7dcfa1..773ebbc90 100644
--- a/.github/ISSUE_TEMPLATE/module_issue.yml
+++ b/.github/ISSUE_TEMPLATE/module_issue.yml
@@ -1,6 +1,6 @@
name: Request a new module
description: Request a new module be added to the collection. Complete all required fields.
-title: "[Module] "
+title: "[Module] Enter description "
labels: [Module]
assignees:
- IBMAnsibleHelper
@@ -32,6 +32,6 @@ body:
- As a < type of user >, I want < some goal > so that < some reason >.
Examples:
- As a z/OS System Admin, I can grow zFS aggregates with Ansible so that my data sets don't fill up.
- - As a Junior developer, I want to be able to zip and unzip arives using Ansible, so that I don't have to perform operations elsewhere.
+ - As a Junior developer, I want to be able to zip and unzip archives using Ansible, so that I don't have to perform operations elsewhere.
validations:
required: false
diff --git a/.github/workflows/ac-ansible-test-sanity.yml b/.github/workflows/ac-ansible-test-sanity.yml
new file mode 100644
index 000000000..d0c4b58d2
--- /dev/null
+++ b/.github/workflows/ac-ansible-test-sanity.yml
@@ -0,0 +1,73 @@
+name: AC Ansible sanity
+
+on:
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ branches:
+ - dev
+ - staging*
+ paths-ignore:
+ - '**.tar.gz'
+ - 'pycache/**'
+ - '.ansible-lint'
+ - 'cache/**'
+ - '.DS_Store'
+ - '.git/**'
+ - '.github/**'
+ - '.gitignore'
+ - '.python-version'
+ - '.pytest_cache/**'
+ - '.vscode/**'
+ - 'Jenkinsfile'
+ - 'ac'
+ - 'ansible.cfg'
+ - 'changelogs/**'
+ - 'collections/**'
+ - 'docs/**'
+ - 'scripts/**'
+ - 'test_config.yml'
+ - 'tests/*.ini'
+ - 'tests/*.py'
+ - 'tests/.pytest_cache'
+ - 'tests/pycache'
+ - 'tests/functional'
+ - 'tests/helpers'
+ - 'tests/requirements.txt'
+ - 'tests/unit'
+ - 'tests/sanity/ignore-*'
+ - 'venv*'
+
+jobs:
+ ansible-sanity:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-latest
+ env:
+ branch: ${{ github.event.pull_request.head.ref }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+
+ - name: Set up venv
+ run: |
+ python -m pip install --upgrade pip
+ pip install virtualenv
+ mkdir venv
+ virtualenv venv/venv-2.16
+
+ - name: Install dependencies
+ run: |
+ source venv/venv-2.16/bin/activate
+ python -m pip install --upgrade pip
+ pip install ansible
+
+ - name: Run ac-sanity
+ run: |
+ source venv/venv-2.16/bin/activate
+ ./ac --ac-build
+ ./ac --ac-sanity
diff --git a/.github/workflows/ac-bandit.yml b/.github/workflows/ac-bandit.yml
new file mode 100644
index 000000000..1b93e40a4
--- /dev/null
+++ b/.github/workflows/ac-bandit.yml
@@ -0,0 +1,42 @@
+name: AC Bandit
+
+on:
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ branches:
+ - dev
+ - staging*
+ paths:
+ - 'plugins/**'
+
+jobs:
+ bandit:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+
+ - name: Set up venv
+ run: |
+ python -m pip install --upgrade pip
+ pip install virtualenv
+ mkdir venv
+ virtualenv venv/venv-2.16
+
+ - name: Install dependencies
+ run: |
+ source venv/venv-2.16/bin/activate
+ python -m pip install --upgrade pip
+ pip install bandit
+
+ - name: Run ac-bandit
+ run: |
+ source venv/venv-2.16/bin/activate
+ ./ac --ac-bandit --level l
diff --git a/.github/workflows/ac-galaxy-importer.yml b/.github/workflows/ac-galaxy-importer.yml
new file mode 100644
index 000000000..563d37ada
--- /dev/null
+++ b/.github/workflows/ac-galaxy-importer.yml
@@ -0,0 +1,72 @@
+name: AC Galaxy Importer
+
+on:
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ branches:
+ - dev
+ - staging*
+ paths-ignore:
+ - '**.tar.gz'
+ - 'pycache/**'
+ - '.ansible-lint'
+ - 'cache/**'
+ - '.DS_Store'
+ - '.git/**'
+ - '.github/**'
+ - '.gitignore'
+ - '.python-version'
+ - '.pytest_cache/**'
+ - '.vscode/**'
+ - 'Jenkinsfile'
+ - 'ac'
+ - 'ansible.cfg'
+ - 'changelogs/**'
+ - 'collections/**'
+ - 'docs/**'
+ - 'scripts/**'
+ - 'test_config.yml'
+ - 'tests/*.ini'
+ - 'tests/*.py'
+ - 'tests/.pytest_cache'
+ - 'tests/pycache'
+ - 'tests/functional'
+ - 'tests/helpers'
+ - 'tests/requirements.txt'
+ - 'tests/unit'
+ - 'tests/sanity/ignore-*'
+ - 'venv*'
+
+jobs:
+ galaxy-importer:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+
+ - name: Set up venv
+ run: |
+ python -m pip install --upgrade pip
+ pip install virtualenv
+ mkdir venv
+ virtualenv venv/venv-2.16
+
+ - name: Install dependencies
+ run: |
+ source venv/venv-2.16/bin/activate
+ python -m pip install --upgrade pip
+ pip install ansible
+ pip install ansible-importer
+ pip install galaxy-importer
+
+ - name: Run ac-galaxy-importer
+ run: |
+ source venv/venv-2.16/bin/activate
+ ./ac --ac-galaxy-importer
diff --git a/.github/workflows/ac_changelog.yml b/.github/workflows/ac_changelog.yml
new file mode 100644
index 000000000..e3b3f3cc4
--- /dev/null
+++ b/.github/workflows/ac_changelog.yml
@@ -0,0 +1,41 @@
+name: AC Changelog Lint
+
+on:
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ paths:
+ - 'changelogs/fragments/*'
+ branches:
+ - dev
+ - staging*
+
+jobs:
+ lint:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up python
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+
+ - name: Set up venv
+ run: |
+ python -m pip install --upgrade pip
+ pip install virtualenv
+ mkdir venv
+ virtualenv venv/venv-2.16
+
+ - name: Install dependencies
+ run: |
+ source venv/venv-2.16/bin/activate
+ pip install antsibull-changelog
+
+ - name: Run ac-changelog
+ run: |
+ source venv/venv-2.16/bin/activate
+ ./ac --ac-changelog --command lint
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index d2f69d546..460fad544 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,9 +1,241 @@
-==============================
-ibm.ibm_zos_core Release Notes
-==============================
+================================
+ibm.ibm\_zos\_core Release Notes
+================================
.. contents:: Topics
+v1.10.0-beta.1
+==============
+
+Release Summary
+---------------
+
+Release Date: '2024-05-08'
+This changelog describes all changes made to the modules and plugins included
+in this collection. The release date is the date the changelog is created.
+For additional details such as required dependencies and availability review
+the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
+
+Minor Changes
+-------------
+
+- zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204).
+- zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265).
+- zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307).
+- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183).
+
+Breaking Changes / Porting Guide
+--------------------------------
+
+- zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_data_set - option ``record_format`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_data_set - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_data_set - option ``type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_job_submit - option ``location`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mount - option ``automove`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mount - option ``fs_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+- zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
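In practice, porting for these breaking changes is a case change on the affected options. A minimal sketch against `zos_data_set` (the data set name and sizes are illustrative):

```yaml
- name: Create a PDS using the lowercase choices required from 1.10.0-beta.1
  ibm.ibm_zos_core.zos_data_set:
    name: USER.TEST.PDS
    type: pds           # previously PDS
    space_primary: 5
    space_type: m       # previously M
    record_format: fb   # previously FB
```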
+
+Bugfixes
+--------
+
+- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261).
+- zos_apf - List option only returned one data set. Fix now returns the list of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204).
+- zos_blockinfile - Using double quotation marks inside a block resulted in a false positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340).
+- zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443).
+- zos_job_submit - Was ignoring the default value for location=DATA_SET; now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220).
+- zos_job_submit - when the argument max_rc was different than 0, the changed response returned as false. Fix now returns a changed response as true when the rc is not 0 and max_rc is greater than or equal to the rc of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345).
+- zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created during the module execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1320).
+
+v1.9.0
+======
+
+Release Summary
+---------------
+
+Release Date: '2024-03-11'
+This changelog describes all changes made to the modules and plugins included
+in this collection. The release date is the date the changelog is created.
+For additional details such as required dependencies and availability review
+the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
+
+Major Changes
+-------------
+
+- zos_job_submit - when job statuses were read, they were limited to AC (active), CC (completed normally), ABEND (ended abnormally), ? (error unknown), SEC (security error) and JCLERROR (job had a JCL error). Now additional statuses are supported; CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+
+Minor Changes
+-------------
+
+- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036).
+- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176).
+- zos_job_output - When passing a job ID and owner, the module treated them as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078).
+- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074).
+- zos_job_submit - The module had an undocumented parameter that was used as a temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file, removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091).
+- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error occurred. Fix now correctly passes the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091).
+- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057).
+- zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator.
+- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065).
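Sketches of the two usage patterns called out above; the program, data set, and member names are illustrative. The first passes instream data through `dd_input` with a YAML block indicator (the module now pads columns 1 and 2 as described), the second explicitly executes a REXX exec from a data set:

```yaml
- name: Call IDCAMS with instream input via dd_input
  ibm.ibm_zos_core.zos_mvs_raw:
    program_name: idcams
    auth: true
    dds:
      - dd_input:
          dd_name: sysin
          content: |
            LISTCAT ENTRIES('USER.TEST.DATA')

- name: Explicitly execute a REXX exec from a data set
  ibm.ibm_zos_core.zos_tso_command:
    commands:
      - EXEC 'USER.REXX.LIB(HELLO)' exec
```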
+
+Bugfixes
+--------
+
+- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288).
+- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236).
+- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066).
+- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064).
+- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247).
+- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078).
+- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error occurred. Fix now correctly passes the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042).
+- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078).
+- zos_job_submit - Was ignoring the default value for location=DATA_SET; now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120).
+- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 seconds longer than reported in the duration. Now when the duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283).
+- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error occurred. Fix now correctly passes the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042).
+- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045).
+- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073).
+
+v1.8.0
+======
+
+Release Summary
+---------------
+
+Release Date: '2023-12-08'
+This changelog describes all changes made to the modules and plugins included
+in this collection. The release date is the date the changelog is created.
+For additional details such as required dependencies and availability review
+the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
+
+Minor Changes
+-------------
+
+- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029)
+- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029)
+- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965)
+- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962)
+- zos_copy - Add new option `force_lock` that allows copying into data sets that are already in use by other processes (DISP=SHR). Users need to use it with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980).
+- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804)
+- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014)
+- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962)
+- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951)
+- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063).
+- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976)
+- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976)
+- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068).
+- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072).
+- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029)
+- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965)
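A sketch combining the new `executable` and `aliases` options described above; the library and member names are illustrative:

```yaml
- name: Copy a load module between partitioned data sets, preserving member aliases
  ibm.ibm_zos_core.zos_copy:
    src: USER.LOAD.LIB(PGM1)
    dest: USER.NEW.LIB(PGM1)
    remote_src: true
    executable: true
    aliases: true
```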
+
+Deprecated Features
+-------------------
+
+- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904).
+
+Bugfixes
+--------
+
+- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968).
+- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079).
+- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067).
+- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069).
+- zos_job_submit - Temporary files were created in the tmp directory. Fix now ensures the deletion of files every time the module runs. (https://github.com/ansible-collections/ibm_zos_core/pull/951)
+- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952)
+- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916)
+- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. (https://github.com/ansible-collections/ibm_zos_core/pull/918)
+- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063).
+- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063).
+- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054).
+
+Known Issues
+------------
+
+- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique; some options to work around the error are below.
+
+  - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters.
+  - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook.
+  - If the error is resulting from a batch job, add `ignore_errors:true` to the task, capture the output into a variable, extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`.
+
+  (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972)
+- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983)
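The batch-job workaround in the first known issue could look roughly like the sketch below; the data set name, the `JOB[0-9]+` job ID pattern, and the shape of the registered error message are assumptions, not taken from the collection's documentation:

```yaml
- name: Submit a job whose output may contain non-printable characters
  ibm.ibm_zos_core.zos_job_submit:
    src: USER.JCL.LIB(JOB1)
    location: DATA_SET
  register: submit_result
  ignore_errors: true

- name: Display only the JESMSGLG DD for the extracted job ID
  ibm.ibm_zos_core.zos_job_output:
    job_id: "{{ submit_result.msg | default('') | regex_search('JOB[0-9]+') }}"
    ddname: JESMSGLG
```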
+
+New Modules
+-----------
+
+- ibm.ibm_zos_core.zos_script - Run scripts in z/OS
+
+v1.7.0
+======
+
+Release Summary
+---------------
+
+Release Date: '2023-10-09'
+This changelog describes all changes made to the modules and plugins included
+in this collection. The release date is the date the changelog is created.
+For additional details such as required dependencies and availability review
+the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
+
+Major Changes
+-------------
+
+- zos_copy - Previously, backups were taken when force was set to false, whether or not a user specified this operation, which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896)
+
+Minor Changes
+-------------
+
+- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667)
+- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930).
+- zos_archive - When xmit faces a space error in the xmit operation because the dest or log data set is filled, it now raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930).
+- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773)
+- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821)
+- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841)
+- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778)
+- zos_job_query - unnecessary calls were made to find a job's DDs that incurred unnecessary overhead. This change removes those calls, resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911)
+- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841)
+- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841)
+- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930).
+- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930).
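Jinja2 templating in `zos_copy` is enabled per task via `use_template`; a minimal sketch, assuming a local template file (the path and data set names are illustrative):

```yaml
- name: Render a local Jinja2 template and copy the result to a data set member
  ibm.ibm_zos_core.zos_copy:
    src: templates/job.j2
    dest: USER.JCL.LIB(JOB1)
    use_template: true
```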
+
+Bugfixes
+--------
+
+- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791).
+- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930).
+- zos_blockinfile - Test case generated a data set that was not correctly removed. Changes now delete the correct data set, not only the member. (https://github.com/ansible-collections/ibm_zos_core/pull/840)
+- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix now validates the correct dataset type and record format of the created target. (https://github.com/ansible-collections/ibm_zos_core/pull/824)
+- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794).
+- zos_copy - Reported a warning about the use of _play_context.verbosity. This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806).
+- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806).
+- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772).
+- zos_copy - kept permissions on the target directory when the copy overwrote files. The fix now sets permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795)
+- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791).
+- zos_fetch - Reported a warning about the use of _play_context.verbosity. This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806).
+- zos_job_output - Error message did not specify the job that was not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747)
+- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762).
+
+New Modules
+-----------
+
+- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS.
+- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS.
v1.9.0
======
@@ -336,7 +568,6 @@ in this collection. The release date is the date the changelog is created.
For additional details such as required dependencies and availability review
the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Bugfixes
--------
@@ -360,7 +591,6 @@ in this collection. The release date is the date the changelog is created.
For additional details such as required dependencies and availability review
the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Major Changes
-------------
@@ -432,7 +662,6 @@ in this collection. The release date is the date the changelog is created.
For additional details such as required dependencies and availability review
the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Minor Changes
-------------
@@ -461,7 +690,6 @@ in this collection.
For additional details such as required dependencies and availablity review
the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Bugfixes
--------
@@ -486,7 +714,6 @@ in this collection.
For additional details such as required dependencies and availablity review
the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Bugfixes
--------
@@ -505,7 +732,6 @@ in this collection.
For additional details such as required dependencies and availablity review
the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Bugfixes
--------
@@ -540,7 +766,6 @@ the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__
-
Minor Changes
-------------
diff --git a/README.md b/README.md
index b2345c118..4cfe70a40 100644
--- a/README.md
+++ b/README.md
@@ -1,85 +1,189 @@
-IBM z/OS core collection
-========================
-
-The **IBM® z/OS® core collection**, also represented as
-**ibm_zos_core** in this document, is part of the broader
-initiative to bring Ansible Automation to IBM Z® through the offering
-**Red Hat® Ansible Certified Content for IBM Z®**. The
-**IBM z/OS core collection** supports automation tasks such as
-creating data sets, submitting jobs, querying jobs, retrieving job output,
-encoding data, fetching data sets, copying data sets,
-executing operator commands, executing TSO commands, ping,
-querying operator actions, APF authorizing libraries,
-editing textual data in data sets or Unix System Services files,
-finding data sets, backing up and restoring data sets and
-volumes, mounting file systems, running z/OS programs without JCL,
-running local and remote scripts on z/OS, initializing volumes,
-archiving, unarchiving and templating with Jinja.
-
-
-Red Hat Ansible Certified Content for IBM Z
-===========================================
-
-**Red Hat® Ansible Certified Content for IBM Z** provides the ability to
-connect IBM Z® to clients' wider enterprise automation strategy through the
-Ansible Automation Platform ecosystem. This enables development and operations
-automation on Z through a seamless, unified workflow orchestration with
-configuration management, provisioning, and application deployment in
-one easy-to-use platform.
-
-The **IBM z/OS core collection** is following the
-**Red Hat® Ansible Certified Content for IBM Z®** method of distributing
-content. Collections will be developed in the open, and when content is ready
-for use, it is released to
-[Ansible Galaxy](https://galaxy.ansible.com/ui/)
-for community adoption. Once contributors review community usage, feedback,
-and are satisfied with the content published, the collection will then be
-released to [Ansible Automation Hub](https://www.ansible.com/products/automation-hub)
-as **certified** and **IBM supported** for
-**Red Hat® Ansible Automation Platform subscribers**.
-
-For guides and reference, please review the [documentation](https://ibm.github.io/z_ansible_collections_doc/index.html).
-
-Features
-========
-The **IBM z/OS core collection**, includes
-[connection plugins](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/plugins.html#connection),
-[action plugins](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/plugins.html#action),
-[modules](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/modules.html),
-[filters](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/filters.html),
-and ansible-doc to automate tasks on z/OS.
-
-Ansible version compatibility
-=============================
-This collection has been tested against **Ansible Core** versions >=2.15.
-The Ansible Core versions supported for this collection align to the
-[ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the
-[Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages**
-and **ansible-core**.
-
-For **Ansible Automation Platform** (AAP) users, review the
-[Ansible Automation Platform Certified Content](https://access.redhat.com/support/articles/ansible-automation-platform-certified-content)
-and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform)
-for more more information on supported versions of Ansible.
-
-Other Dependencies
-==================
-This release of the **IBM z/OS core collection** requires the z/OS managed node have the following:
-- [z/OS](https://www.ibm.com/docs/en/zos)
-- [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm).
-- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos)
-- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x)
-For specific dependency versions, please review the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) for the version of the IBM Ansible z/OS core installed.
-
-Copyright
-=========
-© Copyright IBM Corporation 2020-2024.
-
-License
-=======
-Some portions of this collection are licensed under [GNU General Public
-License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and
-other portions of this collection are licensed under [Apache License,
-Version 2.0](http://www.apache.org/licenses/LICENSE-2.0).
+# IBM® z/OS® core collection
+The **IBM z/OS core** collection enables Ansible to interact with z/OS data sets and USS files. The collection focuses on fundamental operating system operations such as managing encodings, creating data sets, and submitting jobs.
+### Description
+The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible automation to IBM Z®. This collection makes it possible to manage batch jobs, perform program authorizations, run operator operations, and execute both JES and MVS commands, as well as shell, Python, and REXX scripts. It supports data set creation, searching, copying, fetching, and encoding. It provides archiving and unarchiving of data sets, initializing volumes, performing backups, and Jinja templating.
+
+System programmers can enable pipelines to set up, tear down, and deploy applications, while system administrators can automate time-consuming, repetitive tasks, freeing up their time. New z/OS users can find comfort in Ansible's familiarity and quickly become proficient.
+
+## Requirements
+Before you install the IBM z/OS core collection, you must configure a control node and managed node with a minimum set of requirements.
+The following [table](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html) details the specific software requirements for the controller and managed node.
+
+### Ansible Controller
+* This release of the collection requires **ansible-core >=2.15** (Ansible >=8.x). For additional requirements, such as Python, review the [support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix).
+
+### Managed Node
+This release of the collection requires the following:
+* [z/OS](https://www.ibm.com/docs/en/zos) V2R4 or later, but prior to V3R1.
+* [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm).
+* [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11.
+* [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau) 1.3.0 or later.
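+
+To confirm the dependency versions on a managed node, you can check them from a USS shell. A minimal sketch; the installation paths below are examples only and must be adjusted to your system:
+
+```
+# Report the ZOAU version (example installation path).
+/usr/lpp/IBM/zoautil/bin/zoaversion
+
+# Report the IBM Enterprise Python version (example installation path).
+/usr/lpp/IBM/cyp/v3r11/pyz/bin/python3 --version
+```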
+
+## Installation
+Before using this collection, you need to install it with the Ansible Galaxy command-line tool:
+
+```
+ansible-galaxy collection install ibm.ibm_zos_core
+```
+
+You can also include it in a requirements.yml file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format:
+
+```
+collections:
+ - name: ibm.ibm_zos_core
+```
+
+Note that if you install the collection from Ansible Galaxy, it will not be upgraded automatically when you upgrade the Ansible package.
+
+To upgrade the collection to the latest available version, run the following command:
+
+```
+ansible-galaxy collection install ibm.ibm_zos_core --upgrade
+```
+
+You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0:
+
+```
+ansible-galaxy collection install ibm.ibm_zos_core:1.0.0
+```
+
+You can also install a beta version of the collection. A beta version is only available on Galaxy and is only supported by the community until it is promoted to General Availability (GA). Use the following syntax to install a beta version:
+
+```
+ansible-galaxy collection install ibm.ibm_zos_core:1.10.0-beta.1
+```
+
+As part of the installation, the collection [requirements](#Requirements) must be made available to Ansible through the use of [environment variables](https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/zos_core/configuration_guide.md#environment-variables). The preferred configuration is to place the environment variables in `group_vars` and `host_vars`; you can find examples of this configuration under any [playbook project](https://github.com/IBM/z_ansible_collections_samples), for example, the **data set** example [configuration](https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_concepts/data_sets/data_set_basics#configuration) documentation.
+
+If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed [here](https://github.com/ansible-collections/ibm_zos_core/discussions/657).
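+
+A minimal sketch of such a test playbook, assuming example installation paths that you must replace with your own:
+
+```
+- hosts: zos_host
+  collections:
+    - ibm.ibm_zos_core
+  vars:
+    # Example paths; replace with your installation paths.
+    PYZ: "/usr/lpp/IBM/cyp/v3r11/pyz"
+    ZOAU: "/usr/lpp/IBM/zoautil"
+    ansible_python_interpreter: "{{ PYZ }}/bin/python3"
+  environment:
+    _BPXK_AUTOCVT: "ON"
+    ZOAU_HOME: "{{ ZOAU }}"
+    PYTHONPATH: "{{ ZOAU }}/lib"
+    LIBPATH: "{{ ZOAU }}/lib:{{ PYZ }}/lib:/lib:/usr/lib:."
+    PATH: "{{ ZOAU }}/bin:{{ PYZ }}/bin:/bin:/var/bin"
+  tasks:
+    - name: Ping the z/OS managed node
+      ibm.ibm_zos_core.zos_ping:
+```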
+
+To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method).
+
+If the wheel is installed using the `--target` option, the package is installed into the specified target directory. The environment variable `PYTHONPATH` must be set to where the package is installed, for example, `PYTHONPATH: /usr/zoau/wheels`. Using `--target` is recommended; otherwise, the wheel is installed into Python's home directory, which may not have write permissions or persist after an update.
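+
+A minimal sketch of the `--target` installation, assuming an example wheel name and directory (the actual wheel file ships with ZOAU):
+
+```
+# Install the ZOAU Python wheel into a dedicated directory (example path).
+pip3 install zoautil_py-1.3.*.whl --target=/usr/zoau/wheels
+
+# Point the interpreter at the wheel installation directory.
+export PYTHONPATH=/usr/zoau/wheels
+```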
+
+If the wheel is installed using the `--user` option, the package is installed into the user directory. The environment variable `PYTHONPATH` must be set to where the package is installed, for example, `PYTHONPATH: /u/user`.
+
+Environment variables:
+```
+PYZ: "path_to_python_installation_on_zos_target"
+ZOAU: "path_to_zoau_installation_on_zos_target"
+ZOAU_PYTHONPATH: "path_to_zoau_wheel_installation_directory"
+
+ansible_python_interpreter: "{{ PYZ }}/bin/python3"
+
+environment_vars:
+ _BPXK_AUTOCVT: "ON"
+ ZOAU_HOME: "{{ ZOAU }}"
+ PYTHONPATH: "{{ ZOAU_PYTHONPATH }}"
+ LIBPATH: "{{ ZOAU }}/lib:{{ PYZ }}/lib:/lib:/usr/lib:."
+ PATH: "{{ ZOAU }}/bin:{{ PYZ }}/bin:/bin:/var/bin"
+ _CEE_RUNOPTS: "FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)"
+ _TAG_REDIR_ERR: "txt"
+ _TAG_REDIR_IN: "txt"
+ _TAG_REDIR_OUT: "txt"
+ LANG: "C"
+ PYTHONSTDINENCODING: "cp1047"
+```
+
+## Use Cases
+* Use Case Name: Add a new z/OS User
+ * Actors:
+ * Application Developer
+ * Description:
+ * An application developer can submit a new user request for the system admin to approve.
+ * Flow:
+ * Verify the user does not exist; create the home directory, password, and passphrase
+ * Create the home directory and add the user to the system
+ * Provide access to resources, add the user to system groups, and define an alias
+ * Create the user's ISPROF data set
+ * Create the user's private data set and mount it with persistence
+ * Generate an email with the login credentials
+* Use Case Name: Automate certificate renewals
+ * Actors:
+ * System Admin
+ * Description:
+ * The system administrator can automate certificate renewals.
+ * Flow:
+ * Set up, configure, and run the z/OS Health Checker to generate a report
+ * Search the Health Checker report for expiring certificates
+ * Renew expiring certificates
+ * Collect expiring certificate attributes and backup certificate
+ * Replicate certificate with a new label
+ * Generate signing request and sign new certificate
+ * Supersede the old certificate with the new one
+ * Delete old certificate and relabel new certificate with previous certificate name
+* Use Case Name: Provision a Liberty Profile Instance
+ * Actors:
+ * Application Developer
+ * Description:
+ * An application developer can provision an application runtime that accelerates the delivery of cloud-native applications; a playbook sketch of the first steps follows this list.
+ * Flow:
+ * Create and mount a file system for the Liberty profile.
+ * Create a Liberty Profile instance with optional configurations.
+ * Enable z/OS authorized services for the Liberty profile.
+ * Start an angel process or a server process
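+
+As an illustration of the first steps in the Liberty flow above, a minimal sketch using the collection's `zos_data_set` and `zos_mount` modules; the data set name and mount point are placeholders:
+
+```
+- name: Create a zFS data set for the Liberty profile (placeholder name)
+  ibm.ibm_zos_core.zos_data_set:
+    name: OMVS.LIBERTY.ZFS
+    type: zfs
+    space_primary: 10
+    space_type: m
+
+- name: Mount the zFS at the Liberty profile path (placeholder path)
+  ibm.ibm_zos_core.zos_mount:
+    src: OMVS.LIBERTY.ZFS
+    path: /u/liberty/wlp
+    fs_type: zfs
+    state: mounted
+```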
+
+## Testing
+All releases, including betas, will have:
+* 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests.
+* 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of [ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests).
+* 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst).
+* 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/), permitting only failures that are false positives.
+
+This release of the collection was tested with:
+* ansible-core v2.15.x
+* Python 3.9.x
+* IBM Open Enterprise SDK for Python 3.11.x
+* IBM Z Open Automation Utilities (ZOAU) 1.3.0.x
+* z/OS V2R5
+
+This release introduces case sensitivity for option values and includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to identify which option values need to be updated.
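+
+For example, option values that were previously uppercase must now be lowercase (a hypothetical task shown before and after porting):
+
+```
+# Before (uppercase values, no longer accepted):
+- ibm.ibm_zos_core.zos_data_set:
+    name: USER.PRIVATE.TESTDS
+    type: PDS
+    space_type: M
+    record_format: FB
+
+# After (lowercase values):
+- ibm.ibm_zos_core.zos_data_set:
+    name: USER.PRIVATE.TESTDS
+    type: pds
+    space_type: m
+    record_format: fb
+```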
+
+## Contributing
+This community is not currently accepting contributions. However, we encourage you to open [git issues](https://github.com/ansible-collections/ibm_zos_core/issues) for bugs, comments, or feature requests, and check back periodically to learn when community contributions will be accepted.
+
+Review the [development docs](https://ibm.github.io/z_ansible_collections_doc/zhmc-ansible-modules/docs/source/development.html#development) to learn how you can create an environment and test the collection's modules.
+
+### Communicating with the IBM z/OS core community
+If you would like to communicate with this community, you can do so through:
+* GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions).
+* GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose).
+* [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness.
+* Discord [System Z Enthusiasts](https://forum.ansible.com/) room [ansible](https://discord.gg/nKC8F89v).
+* Matrix Ansible room [ansible-zos](#ansible-zos:matrix.org).
+* Ansible community Matrix [rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels).
+
+## Support
+As Red Hat Ansible [Certified Content](https://catalog.redhat.com/software/search?target_platforms=Red%20Hat%20Ansible%20Automation%20Platform), this collection is entitled to [support](https://access.redhat.com/support/) through [Ansible Automation Platform](https://www.redhat.com/en/technologies/management/ansible) (AAP). After creating a Red Hat support case, if it is determined the issue belongs to IBM, Red Hat will instruct you to create an [IBM support case](https://www.ibm.com/mysupport/s/createrecord/NewCase) and share the case number with Red Hat so that a collaboration can begin between Red Hat and IBM.
+
+If a support case cannot be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible).
+
+The current supported versions of this collection can be found listed under the [release section](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html).
+
+## Release Notes and Roadmap
+The collection's cumulative release notes can be reviewed [here](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). Note that some collections release before an ansible-core version reaches End of Life (EOL); the ansible-core version a collection supports must be one that is still within its support life cycle.
+
+For AAP users, to see the supported ansible-core versions, review the [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform).
+
+For Galaxy and GitHub users, to see the supported ansible-core versions, review the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix).
+
+The collection's changelogs can be reviewed in the following table.
+| Version | ansible-core | Ansible | Status (GA date) |
+|---------|--------------|---------|----------------------------|
+| 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)|
+| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | In preview |
+| [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 |
+| [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 |
+| [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 |
+| [1.6.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 28 June 2023 |
+| [1.5.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 25 April 2023 |
+
+## Related Information
+Example playbooks and use cases can be found in the [z/OS playbook repository](https://github.com/IBM/z_ansible_collections_samples).
+Supplemental content on getting started with Ansible, architecture and use cases is available [here](https://ibm.github.io/z_ansible_collections_doc/reference/helpful_links.html).
+
+## License Information
+Some portions of this collection are licensed under [GNU General Public License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and other portions of this collection are licensed under [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0).
See individual files for applicable licenses.
\ No newline at end of file
diff --git a/ac b/ac
index b5febedbb..9aee6a02d 100755
--- a/ac
+++ b/ac
@@ -241,6 +241,44 @@ ac_build(){
$VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-*
}
+# ------------------------------------------------------------------------------
+# Run galaxy importer on collection.
+# ------------------------------------------------------------------------------
+#->ac-galaxy-importer:
+## Build current branch and run galaxy importer on collection.
+## Usage: ac [--ac-galaxy-importer]
+## Example:
+## $ ac --ac-galaxy-importer
+ac_galaxy_importer(){
+ message "Running Galaxy Importer"
+ . $VENV_BIN/activate && collection_name=$($VENV_BIN/ansible-galaxy collection build --force | awk -F/ '{print $NF}') && python -m galaxy_importer.main $collection_name
+}
+
+# ------------------------------------------------------------------------------
+# Run a changelog lint locally
+# ------------------------------------------------------------------------------
+#->ac-changelog:
+## Runs antsibull-changelog to generate the release changelog or perform a lint
+## on changelog fragments or release notes.
+## Usage: ac [--ac-changelog] [--command <command>]
+## <command> - choose from 'init', 'lint', 'lint-changelog-yaml', 'release', 'generate'
+## - generate: generate the changelog
+## - init: set up changelog infrastructure for the collection, or another project
+## - lint: check changelog fragments for syntax errors
+## - lint-changelog-yaml: check the syntax of the changelogs/changelog.yaml file
+## - release: add a new release to the changelog metadata
+## Example:
+## $ ac --ac-changelog --command lint
+## $ ac --ac-changelog --command release
+## $ ac --ac-changelog
+ac_changelog(){
+ option_command=$1
+ if [ ! "$option_command" ]; then
+ option_command="lint"
+ fi
+ message "Running Changelog '$option_command'"
+ . $VENV_BIN/activate && antsibull-changelog "${option_command}"
+}
+
# ------------------------------------------------------------------------------
# Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured)
# ------------------------------------------------------------------------------
@@ -584,7 +622,7 @@ host_nodes(){
## the 'password' option should only be an option when the utility cannot decrypt.
## Usage: ac [--venv-setup] [--password 123456]
## Example:
-## $ ac --venv-setup --passsword 123456
+## $ ac --venv-setup --password 123456
## $ ac --venv-setup
venv_setup(){
option_pass=$1
@@ -636,7 +674,6 @@ while true; do
exit 1
fi
fi
-
case $1 in
-h|-\?|--help)
if [ "$1" = "-h" ] || [ "$1" = "-?" ]; then
@@ -654,6 +691,14 @@ while true; do
ensure_managed_venv_exists $1
option_submitted="--ac-build"
;;
+ --ac-galaxy-importer) # Command
+ ensure_managed_venv_exists $1
+ option_submitted="--ac-galaxy-importer"
+ ;;
+ --ac-changelog) # Command
+ ensure_managed_venv_exists $1
+ option_submitted="--ac-changelog"
+ ;;
--ac-install)
ensure_managed_venv_exists $1 # Command
option_submitted="--ac-install"
@@ -717,6 +762,11 @@ while true; do
ensure_managed_venv_exists $1
option_submitted="--venv-stop"
;;
+ --command|--command=?*) # option
+ command=`option_processor $1 $2`
+ option_sanitize $command
+ shift
+ ;;
--debug|--debug=?*) # option
debug=`option_processor $1 $2`
option_sanitize $debug
@@ -801,6 +851,10 @@ if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then
ac_bandit $level
elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then
ac_build
+elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ; then
+ ac_galaxy_importer
+elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then
+ ac_changelog $command
elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then
ac_install $version
elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then
diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml
index 4e2979ebb..e5bd167b7 100644
--- a/changelogs/.plugin-cache.yaml
+++ b/changelogs/.plugin-cache.yaml
@@ -135,4 +135,4 @@ plugins:
strategy: {}
test: {}
vars: {}
-version: 1.9.0
+version: 1.10.0-beta.1
diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml
index a8404bf84..6e034e91c 100644
--- a/changelogs/changelog.yaml
+++ b/changelogs/changelog.yaml
@@ -78,6 +78,173 @@ releases:
name: zos_tso_command
namespace: ''
release_date: '2022-06-07'
+ 1.10.0-beta.1:
+ changes:
+ breaking_changes:
+ - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase
+ choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_backup_restore - option ``space_type`` no longer accepts uppercase choices,
+ users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase
+ choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_data_set - option ``record_format`` no longer accepts uppercase choices,
+ users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_data_set - option ``space_type`` no longer accepts uppercase choices,
+ users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_data_set - option ``type`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_data_set - options inside ``batch`` no longer accept uppercase choices,
+ users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_job_submit - option ``location`` no longer accepts uppercase choices,
+ users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mount - option ``automove`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mount - option ``fs_type`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users
+ should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices,
+ users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase
+ choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal``
+ of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices.
+ This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer
+ accepts uppercase choices, users should replace them with lowercase ones.
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts
+ uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388).
+ bugfixes:
+ - module_utils/job.py - job output containing non-printable characters would
+ crash modules. Fix now handles the error gracefully and returns a message
+ to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261).
+ - zos_apf - List option only returned one data set. Fix now returns the list
+ of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204).
+ - zos_blockinfile - Using double quotation marks inside a block resulted in
+ a false positive result with ZOAU 1.3. Fix now handles this special case to
+ avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340).
+ - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets
+ the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443).
+ - zos_job_submit - Was ignoring the default value for location=DATA_SET; now,
+ when location is not specified, it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220).
+ - zos_job_submit - when the argument max_rc was different from 0, the changed
+ response returned as false. Fix now returns changed as true when the rc is
+ not 0 and max_rc is greater than or equal to the job's return code. (https://github.com/ansible-collections/ibm_zos_core/pull/1345).
+ - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating
+ temporary data sets. Fix now honors the value if provided and uses it as High
+ Level Qualifier for temporary data sets created during the module execution.
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1320).
+ minor_changes:
+ - zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204).
+ - zos_backup_restore - Add tmp_hlq option to the user interface to override
+ the default high level qualifier (HLQ) for temporary and backup data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1265).
+ - zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307).
+ - zos_copy - Improve zos_copy performance when copying multiple members from
+ one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183).
+ release_summary: 'Release Date: ''2024-05-08''
+
+ This changelog describes all changes made to the modules and plugins included
+
+ in this collection. The release date is the date the changelog is created.
+
+ For additional details such as required dependencies and availability review
+
+ the collection's `release notes `__'
+ fragments:
+ - 1032-clean-job_submit-test.yml
+ - 1152-zos-lineinfile-remove-zos_copy-dependency.yml
+ - 1156-zos_archive-remove-zos_copy_dep.yml
+ - 1157-remove-zos-copy-from-zos-encode-tests.yml
+ - 1165-remove-zos-copy-dep-from-zos-fetch.yml
+ - 1167-remove-zos-copy-from-zos-blockinfile-tests.yml
+ - 1169-util-job-zoau-migration.yml
+ - 1179-remove-zos_encode-from_zos_lineinfile-tests.yml
+ - 1181-zoau-migration-zos_operator.yml
+ - 1182-migrate-module-utils-data-set.yml
+ - 1183-copy-members.yml
+ - 1184-remove-zos-fetch-dep-from-zos-copy.yml
+ - 1187-migrate-module-utils-copy.yml
+ - 1188-migrate-module_utils-backup.yml
+ - 1189-migrate-module_utils-encode.yml
+ - 1190-migrate-module_utils-dd_statement.yml
+ - 1196-zoau-migration-zos_gather_facts.yml
+ - 1202-doc-gen-script-portability.yml
+ - 1204-migrate-zos_apf.yml
+ - 1209-zoau-migration-zos_job_submit.yml
+ - 1215-Migrate_zos_operator_action_query.yml
+ - 1216-Validate_module_zos_job_output_migration.yml
+ - 1217-validate-job-query.yml
+ - 1218-migrate-zos_encode.yml
+ - 1220-bugfix-zos_job_submit-default_value.yml
+ - 1222-zoau-migration-zos_copy.yml
+ - 1227-migrate-zos_archive.yml
+ - 1228-zos_find-remove-zos_lineinfile_dep.yml
+ - 1229-migrate-zos_fetch.yml
+ - 1237-migrate-zos_mount.yml
+ - 1238-migrate-zos_unarchive.yml
+ - 1242-zoau-migration-zos_data_set.yml
+ - 1256_Migrate_zos_blockinfile_and_lineinfile.yml
+ - 1257-zoau-import-zos_apf.yml
+ - 1261-job-submit-non-utf8-chars.yml
+ - 1265_Migrate_zos_backup_restore.yml
+ - 1270-quick-fix-len-of-volumes-work-around.yml
+ - 1286-update-zos_archive-zos_unarchive-docs.yml
+ - 1295-doc-zos_ping-scp.yml
+ - 1298-Remove_local_charset_from_zos_fetch.yml
+ - 1307-update-sanity-zos_copy.yml
+ - 1320-Zos_mvs_raw_ignores_tmp_hlq.yml
+ - 1322-update-docstring-encode.yml
+ - 1331-update-docstring-ickdsf.yml
+ - 1332-update-docstring-import_handler.yml
+ - 1333-update-docstring-job.yml
+ - 1336-update-docstring-validation.yml
+ - 1340-Work_around_fix_false_positive.yml
+ - 1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml
+ - 1347-update-docstring-zos_data_set.yml
+ - 1348-update-docstring-zos_encode.yml
+ - 1349-update-docstring-zos_fetch.yml
+ - 1350-update-docstring-zos_find.yml
+ - 1351-update-docstring-zos_gather_facts.yml
+ - 1352-update-docstring-zos_job_output.yml
+ - 1353-update-docstring-zos_job_query.yml
+ - 1354-update-docstring-zos_job_submit.yml
+ - 1355-update-docstring-zos_lineinfile.yml
+ - 1356-update-docstring-zos_mount.yml
+ - 1388-lowercase-choices.yml
+ - 1390-update-docstring-zos_script.yml
+ - 1391-update-docstring-zos_tso_command.yml
+ - 1392-update-docstring-zos_volume_init.yml
+ - 1393-update-docstring-zos_apf.yml
+ - 1394-Update_docstring-zos_operator_action_query.yml
+ - 1443-zos_find-filter-size.yml
+ - 692-changelog-lint-ac-tool.yml
+ - 971-bug-job_submit-can-stacktrace.yml
+ - 992-fix-sanity4to6.yml
+ - v1.10.0-beta.1_summary.yml
+ release_date: '2024-05-08'
1.2.1:
changes:
bugfixes:
diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh
index befcaecfe..d7ce5472b 100755
--- a/docs/scripts/post-zos_apf.sh
+++ b/docs/scripts/post-zos_apf.sh
@@ -28,5 +28,5 @@ SCRIPT_DIR=`dirname "$0"`
CURR_PATH=`pwd`
# Delete any temporary index RST
if [[ -f $CURR_PATH/source/modules/zos_apf.rst ]]; then
- sed -i '' "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst
+ sed -i'' -e "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst
fi
diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh
index ca35775d9..4fb77f071 100755
--- a/docs/scripts/pre-template.sh
+++ b/docs/scripts/pre-template.sh
@@ -22,11 +22,11 @@
# and Jinja2 template, and later sphinx html.
# This requries that the ansible collection be prebuilt so that it can find
# the template.py within the collection (not within the git project). Thus run
-# './ac --ac-build' before the make file that builds doc.
+# './ac --ac-build' before the make file that builds doc.
################################################################################
template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'`
cp $template_doc_source $template_doc_source.tmp
-sed -i '' "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source
-sed -i '' "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source
-sed -i '' "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source
+sed -i'' -e "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source
+sed -i'' -e "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source
+sed -i'' -e "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source
diff --git a/docs/source/index.rst b/docs/source/index.rst
index c150d27c0..7cea15c03 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -90,10 +90,9 @@ ansible-doc to automate tasks on z/OS.
community_guides
license
+.. toctree::
+ :maxdepth: 1
+ :caption: Reference
-
-
-
-
-
-
+ reference/documentation
+ reference/community
diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst
index e9a55c007..a94fdc95e 100644
--- a/docs/source/modules/zos_apf.rst
+++ b/docs/source/modules/zos_apf.rst
@@ -62,7 +62,7 @@ volume
1. The volume serial number.
- 2. Six asterisks (******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume.
+ 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume.
3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog.
@@ -189,7 +189,7 @@ batch
1. The volume serial number
- 2. Six asterisks (******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume.
+ 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume.
3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog.
@@ -225,7 +225,7 @@ Examples
- name: Add a library (cataloged) to the APF list and persistence
zos_apf:
library: SOME.SEQUENTIAL.DATASET
- force_dynamic: True
+ force_dynamic: true
persistent:
data_set_name: SOME.PARTITIONED.DATASET(MEM)
- name: Remove a library from the APF list and persistence
@@ -243,7 +243,7 @@ Examples
batch:
- library: SOME.SEQ.DS1
- library: SOME.SEQ.DS2
- sms: True
+ sms: true
- library: SOME.SEQ.DS3
volume: T12345
- name: Print the APF list matching library pattern or volume serial number
diff --git a/docs/source/modules/zos_apf.rst-e b/docs/source/modules/zos_apf.rst-e
new file mode 100644
index 000000000..b758d3129
--- /dev/null
+++ b/docs/source/modules/zos_apf.rst-e
@@ -0,0 +1,318 @@
+
+:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_apf.py
+
+.. _zos_apf_module:
+
+
+zos_apf -- Add or remove libraries to Authorized Program Facility (APF)
+=======================================================================
+
+
+
+.. contents::
+ :local:
+ :depth: 1
+
+
+Synopsis
+--------
+- Adds or removes libraries to Authorized Program Facility (APF).
+- Manages APF statement persistent entries to a data set or data set member.
+- Changes APF list format to "DYNAMIC" or "STATIC".
+- Gets the current APF list entries.
+
+
+
+
+
+Parameters
+----------
+
+
+library
+ The library name to be added or removed from the APF list.
+
+ | **required**: False
+ | **type**: str
+
+
+state
+ Ensure that the library is added ``state=present`` or removed ``state=absent``.
+
+ The APF list format has to be "DYNAMIC".
+
+ | **required**: False
+ | **type**: str
+ | **default**: present
+ | **choices**: absent, present
+
+
+force_dynamic
+ Will force the APF list format to "DYNAMIC" before adding or removing libraries.
+
+ If the format is "STATIC", the format will be changed to "DYNAMIC".
+
+ | **required**: False
+ | **type**: bool
+ | **default**: False
+
+
+volume
+ The identifier for the volume containing the library specified in the ``library`` parameter. The values must be one of the following.
+
+ 1. The volume serial number.
+
+ 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume.
+
+ 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog.
+
+ If ``volume`` is not specified, ``library`` has to be cataloged.
+
+ | **required**: False
+ | **type**: str
+
+
+sms
+ Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library.
+
+ If ``sms=True``, ``volume`` value will be ignored.
+
+ | **required**: False
+ | **type**: bool
+ | **default**: False
+
+
+operation
+ Change APF list format to "DYNAMIC" ``operation=set_dynamic`` or "STATIC" ``operation=set_static``.
+
+ Display the APF list current format ``operation=check_format``.
+
+ Display APF list entries; when ``operation=list``, the ``library``, ``volume`` and ``sms`` options will be used as filters.
+
+ If ``operation`` is not set, the add or remove operation will be ignored.
+
+ | **required**: False
+ | **type**: str
+ | **choices**: set_dynamic, set_static, check_format, list
+
+
+tmp_hlq
+ Override the default high level qualifier (HLQ) for temporary and backup data sets.
+
+ The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used.
+
+ | **required**: False
+ | **type**: str
+
+
+persistent
+ Add/remove persistent entries to or from *data_set_name*
+
+ ``library`` will not be persisted or removed if ``persistent=None``
+
+ | **required**: False
+ | **type**: dict
+
+
+ data_set_name
+ The data set name used for persisting or removing a ``library`` from the APF list.
+
+ | **required**: True
+ | **type**: str
+
+
+ marker
+ The marker line template.
+
+ ``{mark}`` will be replaced with "BEGIN" and "END".
+
+ Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs.
+
+ ``{mark}`` length may not exceed 72 characters.
+
+ The timestamp used in the default marker follows the '+%Y%m%d-%H%M%S' date format.
+
+ | **required**: False
+ | **type**: str
+ | **default**: /* {mark} ANSIBLE MANAGED BLOCK */
+
+
+ backup
+ Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*.
+
+ *backup_name* can be used to specify a backup file name if *backup=true*.
+
+ The backup file name will be returned on either success or failure of module execution so that data can be retrieved.
+
+ | **required**: False
+ | **type**: bool
+ | **default**: False
+
+
+ backup_name
+ Specify the USS file name or data set name for the destination backup.
+
+ If the source *data_set_name* is a USS file or path, the backup_name must be a file or path name, and the USS file or path must be an absolute path name.
+
+ If the source is an MVS data set, the backup_name must be an MVS data set name.
+
+ If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``.
+
+ If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it.
+
+ | **required**: False
+ | **type**: str
+
+
+
+batch
+ A list of dictionaries for adding or removing libraries.
+
+ This is mutually exclusive with ``library``, ``volume``, ``sms``
+
+ Can be used with ``persistent``
+
+ | **required**: False
+ | **type**: list
+ | **elements**: dict
+
+
+ library
+ The library name to be added or removed from the APF list.
+
+ | **required**: True
+ | **type**: str
+
+
+ volume
+ The identifier for the volume containing the library specified on the ``library`` parameter. The values must be one of the following.
+
+ 1. The volume serial number
+
+ 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume.
+
+ 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog.
+
+ If ``volume`` is not specified, ``library`` has to be cataloged.
+
+ | **required**: False
+ | **type**: str
+
+
+ sms
+ Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library.
+
+ If true, ``volume`` will be ignored.
+
+ | **required**: False
+ | **type**: bool
+ | **default**: False
+
+
+
+
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Add a library to the APF list
+ zos_apf:
+ library: SOME.SEQUENTIAL.DATASET
+ volume: T12345
+ - name: Add a library (cataloged) to the APF list and persistence
+ zos_apf:
+ library: SOME.SEQUENTIAL.DATASET
+ force_dynamic: true
+ persistent:
+ data_set_name: SOME.PARTITIONED.DATASET(MEM)
+ - name: Remove a library from the APF list and persistence
+ zos_apf:
+ state: absent
+ library: SOME.SEQUENTIAL.DATASET
+ volume: T12345
+ persistent:
+ data_set_name: SOME.PARTITIONED.DATASET(MEM)
+ - name: Batch libraries with custom marker, persistence for the APF list
+ zos_apf:
+ persistent:
+ data_set_name: "SOME.PARTITIONED.DATASET(MEM)"
+ marker: "/* {mark} PROG001 USR0010 */"
+ batch:
+ - library: SOME.SEQ.DS1
+ - library: SOME.SEQ.DS2
+ sms: true
+ - library: SOME.SEQ.DS3
+ volume: T12345
+ - name: Print the APF list matching library pattern or volume serial number
+ zos_apf:
+ operation: list
+ library: SOME.SEQ.*
+ volume: T12345
+ - name: Set the APF list format to STATIC
+ zos_apf:
+ operation: set_static
+
+
+
+
+Notes
+-----
+
+.. note::
+ It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF® FACILITY resource class. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user.
+
+ To add or delete the APF list entry for library libname, you must have UPDATE authority to the RACF® FACILITY resource class entity CSVAPF.libname, or there must be no FACILITY class profile that protects that entity.
+
+ To change the format of the APF list to dynamic, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.DYNAMIC, or there must be no FACILITY class profile that protects that entity.
+
+ To change the format of the APF list back to static, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.STATIC, or there must be no FACILITY class profile that protects that entity.
+
+
+
+
+
+
+
+Return Values
+-------------
+
+
+stdout
+ The stdout from ZOAU command apfadm. Output varies based on the type of operation.
+
+ state> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm
+
+ operation> stdout of operation options
+
+ list> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...]
+
+ set_dynamic> Set to DYNAMIC
+
+ set_static> Set to STATIC
+
+ check_format> DYNAMIC or STATIC
+
+ | **returned**: always
+ | **type**: str
+
+stderr
+ The error messages from ZOAU command apfadm
+
+ | **returned**: always
+ | **type**: str
+ | **sample**: BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list.
+
+rc
+ The return code from ZOAU command apfadm
+
+ | **returned**: always
+ | **type**: int
+
+msg
+ The module messages
+
+ | **returned**: failure
+ | **type**: str
+ | **sample**: Parameter verification failed
+
+backup_name
+ Name of the backup file or data set that was created.
+
+ | **returned**: if backup=true, always
+ | **type**: str
+
diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst
index fe93474f0..f2971fc6a 100644
--- a/docs/source/modules/zos_archive.rst
+++ b/docs/source/modules/zos_archive.rst
@@ -78,7 +78,7 @@ format
| **required**: False
| **type**: str
- | **choices**: PACK, SPACK
+ | **choices**: pack, spack
xmit_log_data_set
@@ -203,8 +203,8 @@ dest_data_set
| **required**: False
| **type**: str
- | **default**: SEQ
- | **choices**: SEQ
+ | **default**: seq
+ | **choices**: seq
space_primary
@@ -228,21 +228,21 @@ dest_data_set
space_type
If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space.
- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``.
| **required**: False
| **type**: str
- | **choices**: K, M, G, CYL, TRK
+ | **choices**: k, m, g, cyl, trk
record_format
If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``)
- Choices are case-insensitive.
+ Choices are case-sensitive.
| **required**: False
| **type**: str
- | **choices**: FB, VB, FBA, VBA, U
+ | **choices**: fb, vb, fba, vba, u
record_length
@@ -373,8 +373,8 @@ Examples
format:
name: terse
format_options:
- terse_pack: "SPACK"
- use_adrdssu: True
+ terse_pack: "spack"
+ use_adrdssu: true
# Use a pattern to store
- name: Compress data set pattern using xmit
diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst
index cc6c60d66..69ca57cda 100644
--- a/docs/source/modules/zos_backup_restore.rst
+++ b/docs/source/modules/zos_backup_restore.rst
@@ -182,13 +182,13 @@ space
space_type
The unit of measurement to use when defining data set space.
- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``.
- When *full_volume=True*, *space_type* defaults to ``G``, otherwise default is ``M``
+ When *full_volume=True*, *space_type* defaults to ``g``, otherwise default is ``m``
| **required**: False
| **type**: str
- | **choices**: K, M, G, CYL, TRK
+ | **choices**: k, m, g, cyl, trk
hlq
@@ -200,6 +200,15 @@ hlq
| **type**: str
+tmp_hlq
+ Override the default high level qualifier (HLQ) for temporary and backup data sets.
+
+ The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used.
+
+ | **required**: False
+ | **type**: str
+
+
Examples
@@ -232,7 +241,7 @@ Examples
data_sets:
include: user.**
backup_name: /tmp/temp_backup.dzp
- recover: yes
+ recover: true
- name: Backup all datasets matching the pattern USER.** to data set MY.BACKUP.DZP,
allocate 100MB for data sets used in backup process.
@@ -242,7 +251,7 @@ Examples
include: user.**
backup_name: MY.BACKUP.DZP
space: 100
- space_type: M
+ space_type: m
- name:
Backup all datasets matching the pattern USER.** that are present on the volume MYVOL1 to data set MY.BACKUP.DZP,
@@ -254,7 +263,7 @@ Examples
volume: MYVOL1
backup_name: MY.BACKUP.DZP
space: 100
- space_type: M
+ space_type: m
- name: Backup an entire volume, MYVOL1, to the UNIX file /tmp/temp_backup.dzp,
allocate 1GB for data sets used in backup process.
@@ -262,9 +271,9 @@ Examples
operation: backup
backup_name: /tmp/temp_backup.dzp
volume: MYVOL1
- full_volume: yes
+ full_volume: true
space: 1
- space_type: G
+ space_type: g
- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp.
Use z/OS username as new HLQ.
@@ -305,10 +314,10 @@ Examples
zos_backup_restore:
operation: restore
volume: MYVOL2
- full_volume: yes
+ full_volume: true
backup_name: MY.BACKUP.DZP
space: 1
- space_type: G
+ space_type: g
- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp.
Specify DB2SMS10 for the SMS storage and management classes to use for the restored
diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst
index 00e274b00..d8319ece9 100644
--- a/docs/source/modules/zos_copy.rst
+++ b/docs/source/modules/zos_copy.rst
@@ -91,7 +91,7 @@ dest
If ``dest`` is a nonexistent USS file, it will be created.
- If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the copy will fail.
+ If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail.
If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option.
@@ -132,7 +132,7 @@ encoding
to
The encoding to be converted to
- | **required**: True
+ | **required**: False
| **type**: str
@@ -234,6 +234,17 @@ local_follow
| **default**: True
+group
+ Name of the group that will own the file system objects.
+
+ When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership.
+
+ This option is only applicable if ``dest`` is USS, otherwise ignored.
+
+ | **required**: False
+ | **type**: str
+
+
mode
The permission of the destination file or directory.
@@ -249,6 +260,17 @@ mode
| **type**: str
+owner
+ Name of the user that should own the filesystem object, as would be passed to the chown command.
+
+ When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership.
+
+ This option is only applicable if ``dest`` is USS, otherwise ignored.
+
+ | **required**: False
+ | **type**: str
+
+
remote_src
If set to ``false``, the module searches for ``src`` at the local machine.
@@ -321,7 +343,7 @@ dest_data_set
| **required**: True
| **type**: str
- | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC, LIBRARY
+ | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, member, basic, library
space_primary
@@ -345,21 +367,21 @@ dest_data_set
space_type
If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space.
- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``.
| **required**: False
| **type**: str
- | **choices**: K, M, G, CYL, TRK
+ | **choices**: k, m, g, cyl, trk
record_format
- If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``)
+ If the destination data set does not exist, this sets the format of the data set. (e.g ``fb``)
- Choices are case-insensitive.
+ Choices are case-sensitive.
| **required**: False
| **type**: str
- | **choices**: FB, VB, FBA, VBA, U
+ | **choices**: fb, vb, fba, vba, u
record_length
@@ -390,9 +412,9 @@ dest_data_set
key_offset
The key offset to use when creating a KSDS data set.
- *key_offset* is required when *type=KSDS*.
+ *key_offset* is required when *type=ksds*.
- *key_offset* should only be provided when *type=KSDS*
+ *key_offset* should only be provided when *type=ksds*
| **required**: False
| **type**: int
@@ -401,9 +423,9 @@ dest_data_set
key_length
The key length to use when creating a KSDS data set.
- *key_length* is required when *type=KSDS*.
+ *key_length* is required when *type=ksds*.
- *key_length* should only be provided when *type=KSDS*
+ *key_length* should only be provided when *type=ksds*
| **required**: False
| **type**: int
@@ -613,7 +635,7 @@ Examples
zos_copy:
src: /path/to/foo.conf
dest: /etc/foo.conf
- mode: 0644
+ mode: "0644"
group: foo
owner: bar
@@ -743,11 +765,11 @@ Examples
remote_src: true
volume: '222222'
dest_data_set:
- type: SEQ
+ type: seq
space_primary: 10
space_secondary: 3
- space_type: K
- record_format: VB
+ space_type: k
+ record_format: vb
record_length: 150
- name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL
@@ -846,12 +868,12 @@ destination_attributes
{
"block_size": 32760,
- "record_format": "FB",
+ "record_format": "fb",
"record_length": 45,
"space_primary": 2,
"space_secondary": 1,
- "space_type": "K",
- "type": "PDSE"
+ "space_type": "k",
+ "type": "pdse"
}
block_size
@@ -864,7 +886,7 @@ destination_attributes
Record format of the dataset.
| **type**: str
- | **sample**: FB
+ | **sample**: fb
record_length
Record length of the dataset.
@@ -888,13 +910,13 @@ destination_attributes
Unit of measurement for space.
| **type**: str
- | **sample**: K
+ | **sample**: k
type
Type of dataset allocated.
| **type**: str
- | **sample**: PDSE
+ | **sample**: pdse
checksum
diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst
index 70e798a08..34162d72e 100644
--- a/docs/source/modules/zos_data_set.rst
+++ b/docs/source/modules/zos_data_set.rst
@@ -32,7 +32,7 @@ name
If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username.
- Required if *type=MEMBER* or *state!=present* and not using *batch*.
+ Required if *type=member* or *state!=present* and not using *batch*.
| **required**: False
| **type**: str
@@ -47,7 +47,7 @@ state
If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*.
- If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted.
+ If *state=absent* and *type=member* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted.
If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*.
@@ -68,7 +68,7 @@ state
If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*.
- If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable.
+ If *state=present* and *type=member* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable.
If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*.
@@ -93,16 +93,16 @@ state
type
- The data set type to be used when creating a data set. (e.g ``pdse``)
+ The data set type to be used when creating a data set. (e.g ``pdse``).
- ``MEMBER`` expects to be used with an existing partitioned data set.
+ ``member`` expects to be used with an existing partitioned data set.
- Choices are case-insensitive.
+ Choices are case-sensitive.
| **required**: False
| **type**: str
- | **default**: PDS
- | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, LIBRARY, BASIC, LARGE, MEMBER, HFS, ZFS
+ | **default**: pds
+ | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs
space_primary
@@ -128,25 +128,25 @@ space_secondary
space_type
The unit of measurement to use when defining primary and secondary space.
- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``.
| **required**: False
| **type**: str
- | **default**: M
- | **choices**: K, M, G, CYL, TRK
+ | **default**: m
+ | **choices**: k, m, g, cyl, trk
record_format
The format of the data set. (e.g ``FB``)
- Choices are case-insensitive.
+ Choices are case-sensitive.
- When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*.
+ When *type=ksds*, *type=esds*, *type=rrds*, *type=lds* or *type=zfs* then *record_format=None*; these types do not have a default *record_format*.
| **required**: False
| **type**: str
- | **default**: FB
- | **choices**: FB, VB, FBA, VBA, U, F
+ | **default**: fb
+ | **choices**: fb, vb, fba, vba, u, f
sms_storage_class
@@ -216,9 +216,9 @@ directory_blocks
key_offset
The key offset to use when creating a KSDS data set.
- *key_offset* is required when *type=KSDS*.
+ *key_offset* is required when *type=ksds*.
- *key_offset* should only be provided when *type=KSDS*
+ *key_offset* should only be provided when *type=ksds*.
| **required**: False
| **type**: int
@@ -227,9 +227,9 @@ key_offset
key_length
The key length to use when creating a KSDS data set.
- *key_length* is required when *type=KSDS*.
+ *key_length* is required when *type=ksds*.
- *key_length* should only be provided when *type=KSDS*
+ *key_length* should only be provided when *type=ksds*.
| **required**: False
| **type**: int
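+
+ Taken together, a minimal sketch of a KSDS allocation (an 8 byte key at offset 0 is an assumed layout):
+
+ .. code-block:: yaml+jinja
+
+    - name: Create a KSDS data set if it does not exist
+      zos_data_set:
+        name: someds.name.here
+        type: ksds
+        key_length: 8
+        key_offset: 0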
@@ -285,7 +285,7 @@ force
The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*.
- The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*.
+ The *force=True* option only applies to data set members when *state=absent* and *type=member*.
| **required**: False
| **type**: bool
@@ -305,7 +305,7 @@ batch
- If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username.
+ If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runner's username.
- Required if *type=MEMBER* or *state!=present*
+ Required if *type=member* or *state!=present*.
| **required**: False
| **type**: str
@@ -320,7 +320,7 @@ batch
If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*.
- If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted.
+ If *state=absent* and *type=member* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted.
- If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*.
+ If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to catalog the data set using the supplied *name* and *volumes*. If the catalog attempt is successful, then the data set is removed. Module completes successfully with *changed=True*.
@@ -341,7 +341,7 @@ batch
If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*.
- If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable.
+ If *state=present* and *type=member* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there are executables (program objects) and data. The member created is formatted to store data, not an executable.
If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*.
@@ -366,16 +366,16 @@ batch
type
- The data set type to be used when creating a data set. (e.g ``PDSE``)
+ The data set type to be used when creating a data set (e.g. ``pdse``).
- ``MEMBER`` expects to be used with an existing partitioned data set.
+ ``member`` expects to be used with an existing partitioned data set.
- Choices are case-insensitive.
+ Choices are case-sensitive.
| **required**: False
| **type**: str
- | **default**: PDS
- | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, LIBRARY, BASIC, LARGE, MEMBER, HFS, ZFS
+ | **default**: pds
+ | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs
space_primary
@@ -401,25 +401,25 @@ batch
space_type
The unit of measurement to use when defining primary and secondary space.
- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``.
| **required**: False
| **type**: str
- | **default**: M
- | **choices**: K, M, G, CYL, TRK
+ | **default**: m
+ | **choices**: k, m, g, cyl, trk
record_format
- The format of the data set. (e.g ``FB``)
+ The format of the data set (e.g. ``fb``).
- Choices are case-insensitive.
+ Choices are case-sensitive.
- When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*.
+ When *type=ksds*, *type=esds*, *type=rrds*, *type=lds* or *type=zfs* then *record_format=None*; these types do not have a default *record_format*.
| **required**: False
| **type**: str
- | **default**: FB
- | **choices**: FB, VB, FBA, VBA, U, F
+ | **default**: fb
+ | **choices**: fb, vb, fba, vba, u, f
sms_storage_class
@@ -489,9 +489,9 @@ batch
key_offset
The key offset to use when creating a KSDS data set.
- *key_offset* is required when *type=KSDS*.
+ *key_offset* is required when *type=ksds*.
- *key_offset* should only be provided when *type=KSDS*
+ *key_offset* should only be provided when *type=ksds*.
| **required**: False
| **type**: int
@@ -500,9 +500,9 @@ batch
key_length
The key length to use when creating a KSDS data set.
- *key_length* is required when *type=KSDS*.
+ *key_length* is required when *type=ksds*.
- *key_length* should only be provided when *type=KSDS*
+ *key_length* should only be provided when *type=ksds*.
| **required**: False
| **type**: int
@@ -549,7 +549,7 @@ batch
The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*.
- The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*.
+ The *force=True* option only applies to data set members when *state=absent* and *type=member*.
| **required**: False
| **type**: bool
@@ -576,7 +576,7 @@ Examples
name: someds.name.here
type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: fba
record_length: 25
@@ -585,21 +585,21 @@ Examples
name: someds.name.here
type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: u
record_length: 25
- replace: yes
+ replace: true
- name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found.
zos_data_set:
name: someds.name.here
type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: u
record_length: 25
volumes: "222222"
- replace: yes
+ replace: true
- name: Create an ESDS data set if it does not exist
zos_data_set:
@@ -633,43 +633,43 @@ Examples
- name: Write a member to an existing PDS; replace if member exists
zos_data_set:
name: someds.name.here(mydata)
- type: MEMBER
- replace: yes
+ type: member
+ replace: true
- name: Write a member to an existing PDS; do not replace if member exists
zos_data_set:
name: someds.name.here(mydata)
- type: MEMBER
+ type: member
- name: Remove a member from an existing PDS
zos_data_set:
name: someds.name.here(mydata)
state: absent
- type: MEMBER
+ type: member
- name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR
zos_data_set:
name: someds.name.here(mydata)
state: absent
- type: MEMBER
- force: yes
+ type: member
+ force: true
- name: Create multiple partitioned data sets and add one or more members to each
zos_data_set:
batch:
- - name: someds.name.here1
- type: PDS
+ - name: someds.name.here1
+ type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: fb
- replace: yes
+ replace: true
- name: someds.name.here1(member1)
- type: MEMBER
+ type: member
- name: someds.name.here2(member1)
- type: MEMBER
- replace: yes
+ type: member
+ replace: true
- name: someds.name.here2(member2)
- type: MEMBER
+ type: member
- name: Catalog a data set present on volume 222222 if it is uncataloged.
zos_data_set:
diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst
index 4c2294e24..2134b336e 100644
--- a/docs/source/modules/zos_encode.rst
+++ b/docs/source/modules/zos_encode.rst
@@ -143,8 +143,8 @@ Examples
encoding:
from: IBM-1047
to: ISO8859-1
- backup: yes
- backup_compress: yes
+ backup: true
+ backup_compress: true
- name: Convert file encoding from IBM-1047 to ISO8859-1 to a directory
zos_encode:
@@ -256,7 +256,6 @@ Examples
-
Notes
-----
diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst
index 8f779d391..a6f55acf9 100644
--- a/docs/source/modules/zos_job_submit.rst
+++ b/docs/source/modules/zos_job_submit.rst
@@ -42,30 +42,18 @@ src
location
- The JCL location. Supported choices are ``DATA_SET``, ``USS`` or ``LOCAL``.
+ The JCL location. Supported choices are ``data_set``, ``uss`` or ``local``.
- DATA_SET can be a PDS, PDSE, or sequential data set.
+ ``data_set`` can be a PDS, PDSE, or sequential data set.
- USS means the JCL location is located in UNIX System Services (USS).
+ ``uss`` means the JCL is located in UNIX System Services (USS).
- LOCAL means locally to the ansible control node.
+ ``local`` means the JCL is located on the Ansible control node.
| **required**: False
| **type**: str
- | **default**: DATA_SET
- | **choices**: DATA_SET, USS, LOCAL
-
-
-wait
- Setting this option will yield no change, it is disabled. There is no need to set *wait*; setting *wait_times_s* is the correct way to configure the amount of time to wait for a job to execute.
-
- This option will be removed in ibm.ibm_zos_core collection version 1.10.0
-
- See option *wait_time_s*.
-
- | **required**: False
- | **type**: bool
- | **default**: False
+ | **default**: data_set
+ | **choices**: data_set, uss, local
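+
+ With this default in mind, omitting *location* behaves like this sketch (the data set name is illustrative):
+
+ .. code-block:: yaml+jinja
+
+    - name: Submit JCL from a cataloged data set, relying on the default location
+      zos_job_submit:
+        src: HLQ.DATA.LLQ(SAMPLE)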
wait_time_s
@@ -96,11 +84,11 @@ return_output
volume
- The volume serial (VOLSER)is where the data set resides. The option is required only when the data set is not cataloged on the system.
+ The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system.
When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail.
- Ignored for *location=USS* and *location=LOCAL*.
+ Ignored for *location=uss* and *location=local*.
| **required**: False
| **type**: str
@@ -109,7 +97,7 @@ volume
encoding
Specifies which encoding the local JCL file should be converted from and to, before submitting the job.
- This option is only supported for when *location=LOCAL*.
+ This option is only supported when *location=local*.
- If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding.
+ If this parameter is not provided and the z/OS system's default encoding cannot be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default; otherwise, the module will detect the z/OS system encoding.
@@ -279,22 +267,22 @@ Examples
.. code-block:: yaml+jinja
- - name: Submit JCL in a PDSE member
+ - name: Submit JCL in a PDSE member.
zos_job_submit:
src: HLQ.DATA.LLQ(SAMPLE)
- location: DATA_SET
+ location: data_set
register: response
- name: Submit JCL in USS with no DDs in the output.
zos_job_submit:
src: /u/tester/demo/sample.jcl
- location: USS
+ location: uss
return_output: false
- name: Convert local JCL to IBM-037 and submit the job.
zos_job_submit:
src: /Users/maxy/ansible-playbooks/provision/sample.jcl
- location: LOCAL
+ location: local
encoding:
from: ISO8859-1
to: IBM-037
@@ -302,25 +290,25 @@ Examples
- name: Submit JCL in an uncataloged PDSE on volume P2SS01.
zos_job_submit:
src: HLQ.DATA.LLQ(SAMPLE)
- location: DATA_SET
+ location: data_set
volume: P2SS01
- name: Submit a long running PDS job and wait up to 30 seconds for completion.
zos_job_submit:
src: HLQ.DATA.LLQ(LONGRUN)
- location: DATA_SET
+ location: data_set
wait_time_s: 30
-   - name: Submit a long running PDS job and wait up to 30 seconds for completion.
-     zos_job_submit:
-       src: HLQ.DATA.LLQ(LONGRUN)
-       location: DATA_SET
-       wait_time_s: 30
- name: Submit JCL and set the max return code the module should fail on to 16.
zos_job_submit:
src: HLQ.DATA.LLQ
- location: DATA_SET
+ location: data_set
max_rc: 16
@@ -712,24 +700,30 @@ jobs
Job status `?` indicates status can not be determined.
+ Jobs where status can not be determined will result in None (NULL).
+
| **type**: str
| **sample**: AC
msg_code
The return code from the submitted job as a string.
+ Jobs which have no return code will result in None (NULL), as is the case when a job errors or is active.
+
| **type**: str
msg_txt
Returns additional information related to the submitted job.
+ Jobs which have no additional information will result in None (NULL).
+
| **type**: str
| **sample**: The job JOB00551 was run with special job processing TYPRUN=SCAN. This will result in no completion, return code or job steps and changed will be false.
code
The return code converted to an integer value when available.
- Jobs which have no return code will return NULL, such is the case of a job that errors or is active.
+ Jobs which have no return code will result in None (NULL), as is the case when a job errors or is active.
| **type**: int
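+
+ Because *code* can be None, guard any numeric comparison when consuming the result; a minimal sketch (the source data set and the failure handling are illustrative):
+
+ .. code-block:: yaml+jinja
+
+    - name: Submit JCL and capture the result
+      zos_job_submit:
+        src: HLQ.DATA.LLQ(SAMPLE)
+        location: data_set
+      register: response
+
+    - name: Fail only when a numeric return code is present and nonzero
+      ansible.builtin.fail:
+        msg: "Job ended with return code {{ response.jobs[0].ret_code.code }}"
+      when:
+        - response.jobs[0].ret_code.code is not none
+        - response.jobs[0].ret_code.code != 0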
@@ -800,10 +794,3 @@ jobs
| **sample**: IEBGENER
-message
- This option is being deprecated
-
- | **returned**: success
- | **type**: str
- | **sample**: Submit JCL operation succeeded.
-
diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst
index f7005017e..4e416f97f 100644
--- a/docs/source/modules/zos_lineinfile.rst
+++ b/docs/source/modules/zos_lineinfile.rst
@@ -239,15 +239,14 @@ Examples
src: /tmp/src/somefile
regexp: '^(.*)User(\d+)m(.*)$'
line: '\1APPUser\3'
- backrefs: yes
+ backrefs: true
- name: Add a line to a member while a task is in execution
zos_lineinfile:
src: SOME.PARTITIONED.DATA.SET(DATA)
insertafter: EOF
line: 'Should be a working test now'
- force: True
-
+ force: true
diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst
index 42e8a8ea6..3b30be909 100644
--- a/docs/source/modules/zos_mount.rst
+++ b/docs/source/modules/zos_mount.rst
@@ -53,11 +53,11 @@ fs_type
The physical file systems data set format to perform the logical mount.
- The *fs_type* is required to be uppercase.
+ The *fs_type* is required to be lowercase.
| **required**: True
| **type**: str
- | **choices**: HFS, ZFS, NFS, TFS
+ | **choices**: hfs, zfs, nfs, tfs
state
@@ -149,25 +149,25 @@ unmount_opts
| **required**: False
| **type**: str
- | **default**: NORMAL
- | **choices**: DRAIN, FORCE, IMMEDIATE, NORMAL, REMOUNT, RESET
+ | **default**: normal
+ | **choices**: drain, force, immediate, normal, remount, reset
mount_opts
Options available to the mount.
- If *mount_opts=RO* on a mounted/remount, mount is performed read-only.
+ If *mount_opts=ro* on a mounted/remount, mount is performed read-only.
- If *mount_opts=SAME* and (unmount_opts=REMOUNT), mount is opened in the same mode as previously opened.
+ If *mount_opts=same* and *unmount_opts=remount*, the mount is opened in the same mode as it was previously opened.
- If *mount_opts=NOWAIT*, mount is performed asynchronously.
+ If *mount_opts=nowait*, mount is performed asynchronously.
- If *mount_opts=NOSECURITY*, security checks are not enforced for files in this file system.
+ If *mount_opts=nosecurity*, security checks are not enforced for files in this file system.
| **required**: False
| **type**: str
- | **default**: RW
- | **choices**: RO, RW, SAME, NOWAIT, NOSECURITY
+ | **default**: rw
+ | **choices**: ro, rw, same, nowait, nosecurity
src_params
@@ -184,15 +184,15 @@ tag_untagged
When the file system is unmounted, the tags are lost.
- If *tag_untagged=NOTEXT* none of the untagged files in the file system are automatically converted during file reading and writing.
+ If *tag_untagged=notext*, none of the untagged files in the file system are automatically converted during file reading and writing.
- If *tag_untagged=TEXT* each untagged file is implicitly marked as containing pure text data that can be converted.
+ If *tag_untagged=text*, each untagged file is implicitly marked as containing pure text data that can be converted.
If this flag is used, use of tag_ccsid is encouraged.
| **required**: False
| **type**: str
- | **choices**: TEXT, NOTEXT
+ | **choices**: text, notext
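+
+ Pairing *tag_untagged=text* with *tag_ccsid* as encouraged above might look like this sketch (CCSID 819, ISO8859-1, is an assumed choice):
+
+ .. code-block:: yaml+jinja
+
+    - name: Mount a filesystem and tag untagged files as ISO8859-1 text
+      zos_mount:
+        src: SOMEUSER.VVV.ZFS
+        path: /u/omvsadm/core
+        fs_type: zfs
+        state: mounted
+        tag_untagged: text
+        tag_ccsid: 819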
tag_ccsid
@@ -240,23 +240,23 @@ automove
These parameters apply only in a sysplex where systems are exploiting the shared file system capability. They specify what happens to the ownership of a file system when a shutdown, PFS termination, dead system takeover, or file system move occurs. The default setting is AUTOMOVE where the file system will be randomly moved to another system (no system list used).
- *automove=AUTOMOVE* indicates that ownership of the file system can be automatically moved to another system participating in a shared file system.
+ *automove=automove* indicates that ownership of the file system can be automatically moved to another system participating in a shared file system.
- *automove=NOAUTOMOVE* prevents movement of the file system's ownership in some situations.
+ *automove=noautomove* prevents movement of the file system's ownership in some situations.
- *automove=UNMOUNT* allows the file system to be unmounted in some situations.
+ *automove=unmount* allows the file system to be unmounted in some situations.
| **required**: False
| **type**: str
- | **default**: AUTOMOVE
- | **choices**: AUTOMOVE, NOAUTOMOVE, UNMOUNT
+ | **default**: automove
+ | **choices**: automove, noautomove, unmount
automove_list
- If(automove=AUTOMOVE), this option will be checked.
+ If *automove=automove*, this option will be checked.
This specifies the list of servers to include or exclude as destinations.
@@ -293,14 +293,14 @@ Examples
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
- name: Unmount a filesystem.
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: unmounted
-    unmount_opts: REMOUNT
+    unmount_opts: remount
opts: same
@@ -309,7 +309,7 @@ Examples
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
-    mount_opts: RO
+    mount_opts: ro
@@ -317,37 +317,37 @@ Examples
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
persistent:
- data_store: SYS1.PARMLIB(BPXPRMAA)
- comment: For Tape2 project
+ data_store: SYS1.PARMLIB(BPXPRMAA)
+ comment: For Tape2 project
- name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB.
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
persistent:
- data_store: SYS1.PARMLIB(BPXPRMAA)
- backup: Yes
- backup_name: SYS1.PARMLIB(BPXPRMAB)
- comment: For Tape2 project
+ data_store: SYS1.PARMLIB(BPXPRMAA)
+ backup: true
+ backup_name: SYS1.PARMLIB(BPXPRMAB)
+ comment: For Tape2 project
- name: Mount a filesystem ignoring uid/gid values.
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
- allow_uid: no
+ allow_uid: false
- name: Mount a filesystem asynchronously (don't wait for completion).
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
opts: nowait
@@ -355,7 +355,7 @@ Examples
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
-    mount_opts: NOSECURITY
+    mount_opts: nosecurity
@@ -363,7 +363,7 @@ Examples
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
-    automove: AUTOMOVE
+    automove: automove
automove_list: I,DEV1,DEV2,DEV3,DEV9
@@ -372,7 +372,7 @@ Examples
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
-    automove: AUTOMOVE
+    automove: automove
automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7
@@ -380,7 +380,6 @@ Examples
-
Notes
-----
diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst
index 3ebedadd5..d98c9493b 100644
--- a/docs/source/modules/zos_mvs_raw.rst
+++ b/docs/source/modules/zos_mvs_raw.rst
@@ -134,7 +134,7 @@ dds
| **required**: False
| **type**: str
- | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog
+ | **choices**: delete, keep, catalog, uncatalog
disposition_abnormal
@@ -142,11 +142,11 @@ dds
| **required**: False
| **type**: str
- | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog
+ | **choices**: delete, keep, catalog, uncatalog
reuse
- Determines if a data set should be reused if *disposition=NEW* and if a data set with a matching name already exists.
+ Determines if a data set should be reused if *disposition=new* and if a data set with a matching name already exists.
If *reuse=true*, *disposition* will be automatically switched to ``SHR``.
@@ -154,7 +154,7 @@ dds
Mutually exclusive with *replace*.
- *reuse* is only considered when *disposition=NEW*
+ *reuse* is only considered when *disposition=new*.
| **required**: False
| **type**: bool
@@ -162,7 +162,7 @@ dds
replace
- Determines if a data set should be replaced if *disposition=NEW* and a data set with a matching name already exists.
+ Determines if a data set should be replaced if *disposition=new* and a data set with a matching name already exists.
If *replace=true*, the original data set will be deleted, and a new data set created.
@@ -170,7 +170,7 @@ dds
Mutually exclusive with *reuse*.
- *replace* is only considered when *disposition=NEW*
+ *replace* is only considered when *disposition=new*.
*replace* will result in loss of all data in the original data set unless *backup* is specified.
@@ -180,7 +180,7 @@ dds
backup
- Determines if a backup should be made of an existing data set when *disposition=NEW*, *replace=true*, and a data set with the desired name is found.
+ Determines if a backup should be made of an existing data set when *disposition=new*, *replace=true*, and a data set with the desired name is found.
*backup* is only used when *replace=true*.
@@ -315,7 +315,7 @@ dds
encoding
How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager.
- *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding.
+ *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding.
Maps to KEYCD1 on z/OS.
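+
+ A sketch of requesting the ``l`` label encoding for the first key encrypting key; the enclosing ``key_encryption_key_1`` suboption name and the label value are assumptions based on the surrounding option structure:
+
+ .. code-block:: yaml+jinja
+
+    - dd_data_set:
+        dd_name: output
+        data_set_name: myhlq.ds1.output
+        disposition: new
+        key_encryption_key_1:
+          label: somekeylabel
+          encoding: l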
@@ -350,7 +350,7 @@ dds
encoding
How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager.
- *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding.
+ *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding.
Maps to KEYCD2 on z/OS.
@@ -389,7 +389,7 @@ dds
Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0.
- Valid values are (1-32760 for non-vsam, 1-32761 for vsam).
+ Valid values are (1-32760 for non-VSAM, 1-32761 for VSAM).
Maps to LRECL on z/OS.
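+
+ For instance, pairing *record_length* with a fixed block format for an 80 byte allocation (values illustrative):
+
+ .. code-block:: yaml+jinja
+
+    - dd_data_set:
+        dd_name: sysprint
+        data_set_name: mypgm.output.ds
+        disposition: new
+        type: seq
+        record_format: fb
+        record_length: 80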
@@ -869,7 +869,7 @@ dds
| **required**: False
| **type**: str
- | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog
+ | **choices**: delete, keep, catalog, uncatalog
disposition_abnormal
@@ -877,11 +877,11 @@ dds
| **required**: False
| **type**: str
- | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog
+ | **choices**: delete, keep, catalog, uncatalog
reuse
- Determines if data set should be reused if *disposition=NEW* and a data set with matching name already exists.
+ Determines if a data set should be reused if *disposition=new* and a data set with a matching name already exists.
If *reuse=true*, *disposition* will be automatically switched to ``SHR``.
@@ -889,7 +889,7 @@ dds
Mutually exclusive with *replace*.
- *reuse* is only considered when *disposition=NEW*
+ *reuse* is only considered when *disposition=new*.
| **required**: False
| **type**: bool
@@ -897,7 +897,7 @@ dds
replace
- Determines if data set should be replaced if *disposition=NEW* and a data set with matching name already exists.
+ Determines if a data set should be replaced if *disposition=new* and a data set with a matching name already exists.
If *replace=true*, the original data set will be deleted, and a new data set created.
@@ -905,7 +905,7 @@ dds
Mutually exclusive with *reuse*.
- *replace* is only considered when *disposition=NEW*
+ *replace* is only considered when *disposition=new*.
*replace* will result in loss of all data in the original data set unless *backup* is specified.
@@ -915,7 +915,7 @@ dds
backup
- Determines if a backup should be made of existing data set when *disposition=NEW*, *replace=true*, and a data set with the desired name is found.
+ Determines if a backup should be made of an existing data set when *disposition=new*, *replace=true*, and a data set with the desired name is found.
*backup* is only used when *replace=true*.
@@ -1050,7 +1050,7 @@ dds
encoding
How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager.
- *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding.
+ *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding.
Maps to KEYCD1 on z/OS.
@@ -1085,7 +1085,7 @@ dds
encoding
How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager.
- *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding.
+ *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding.
Maps to KEYCD2 on z/OS.
@@ -1191,7 +1191,7 @@ dds
path
The path to an existing UNIX file.
- Or provide the path to an new created UNIX file when *status_group=OCREAT*.
+ Or provide the path to a newly created UNIX file when *status_group=ocreat*.
The provided path must be absolute.
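+
+ A sketch of routing output to a UNIX file created on open, per the ``ocreat`` note above (the path is illustrative):
+
+ .. code-block:: yaml+jinja
+
+    - name: List catalog entries and write the output to a new UNIX file
+      zos_mvs_raw:
+        program_name: idcams
+        auth: true
+        dds:
+          - dd_unix:
+              dd_name: sysprint
+              path: /tmp/full_program_output.txt
+              status_group:
+                - ocreat
+                - oappend
+          - dd_input:
+              dd_name: sysin
+              content: " LISTCAT ENTRIES('SYS1.*')"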
@@ -1464,7 +1464,7 @@ Examples
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- reuse: yes
+ reuse: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1488,7 +1488,7 @@ Examples
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- reuse: yes
+ reuse: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1533,7 +1533,7 @@ Examples
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- reuse: yes
+ reuse: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1561,8 +1561,8 @@ Examples
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- replace: yes
- backup: yes
+ replace: true
+ backup: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1633,7 +1633,7 @@ Examples
- name: Take a set of data sets and write them to an archive.
zos_mvs_raw:
program_name: adrdssu
- auth: yes
+ auth: true
dds:
- dd_data_set:
dd_name: archive
@@ -1649,7 +1649,7 @@ Examples
-   - name: Merge two sequential data sets and write them to new data set
+   - name: Merge two sequential data sets and write them to a new data set
zos_mvs_raw:
program_name: sort
- auth: no
+ auth: false
parm: "MSGPRT=CRITICAL,LIST"
dds:
- dd_data_set:
@@ -1680,7 +1680,7 @@ Examples
files.
zos_mvs_raw:
pgm: idcams
- auth: yes
+ auth: true
dds:
- dd_concat:
dd_name: sysprint
@@ -1697,57 +1697,56 @@ Examples
dd_name: sysin
content: " LISTCAT ENTRIES('SYS1.*')"
- - name: Drop the contents of input dataset into output dataset
- using REPRO command.
+ - name: Drop the contents of the input data set into the output data set using the REPRO command.
zos_mvs_raw:
pgm: idcams
- auth: yes
+ auth: true
dds:
- - dd_data_set:
- dd_name: INPUT
- data_set_name: myhlq.ds1.input
- - dd_data_set:
- dd_name: OUTPUT
- data_set_name: myhlq.ds1.output
- - dd_input:
- dd_name: sysin
- content: |
+ - dd_data_set:
+ dd_name: INPUT
+ data_set_name: myhlq.ds1.input
+ - dd_data_set:
+ dd_name: OUTPUT
+ data_set_name: myhlq.ds1.output
+ - dd_input:
+ dd_name: sysin
+ content: |
" REPRO -
INFILE(INPUT) -
OUTFILE(OUTPUT)"
- - dd_output:
- dd_name: sysprint
- return_content:
- type: text
-
- - name: Define a cluster using a literal block style indicator
- with a 2 space indentation.
- zos_mvs_raw:
- program_name: idcams
- auth: yes
- dds:
- - dd_output:
- dd_name: sysprint
- return_content:
- type: text
- - dd_input:
- dd_name: sysin
- content: |2
- DEFINE CLUSTER -
- (NAME(ANSIBLE.TEST.VSAM) -
- CYL(10 10) -
- FREESPACE(20 20) -
- INDEXED -
- KEYS(32 0) -
- NOERASE -
- NONSPANNED -
- NOREUSE -
- SHAREOPTIONS(3 3) -
- SPEED -
- UNORDERED -
- RECORDSIZE(4086 32600) -
- VOLUMES(222222) -
- UNIQUE)
+ - dd_output:
+ dd_name: sysprint
+ return_content:
+ type: text
+
+  - name: Define a cluster using a literal block style indicator with a 2 space indentation.
+ zos_mvs_raw:
+ program_name: idcams
+ auth: true
+ dds:
+ - dd_output:
+ dd_name: sysprint
+ return_content:
+ type: text
+ - dd_input:
+ dd_name: sysin
+             content: |2
+ DEFINE CLUSTER -
+ (NAME(ANSIBLE.TEST.VSAM) -
+ CYL(10 10) -
+ FREESPACE(20 20) -
+ INDEXED -
+ KEYS(32 0) -
+ NOERASE -
+ NONSPANNED -
+ NOREUSE -
+ SHAREOPTIONS(3 3) -
+ SPEED -
+ UNORDERED -
+ RECORDSIZE(4086 32600) -
+ VOLUMES(222222) -
+ UNIQUE)
diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst
index b2e99d399..ba9398b50 100644
--- a/docs/source/modules/zos_operator_action_query.rst
+++ b/docs/source/modules/zos_operator_action_query.rst
@@ -128,7 +128,7 @@ Examples
system: mv29
message_filter:
filter: ^.*IMS.*$
- use_regex: yes
+ use_regex: true
diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst
index 91fa597ee..f2d7aba8b 100644
--- a/docs/source/modules/zos_unarchive.rst
+++ b/docs/source/modules/zos_unarchive.rst
@@ -195,8 +195,8 @@ dest_data_set
| **required**: False
| **type**: str
- | **default**: SEQ
- | **choices**: SEQ, PDS, PDSE
+ | **default**: seq
+ | **choices**: seq, pds, pdse
space_primary
@@ -220,21 +220,21 @@ dest_data_set
space_type
If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space.
- Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``.
+ Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``.
| **required**: False
| **type**: str
- | **choices**: K, M, G, CYL, TRK
+ | **choices**: k, m, g, cyl, trk
record_format
- If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``)
+ If the destination data set does not exist, this sets the format of the data set (e.g. ``fb``).
- Choices are case-insensitive.
+ Choices are case-sensitive.
| **required**: False
| **type**: str
- | **choices**: FB, VB, FBA, VBA, U
+ | **choices**: fb, vb, fba, vba, u
record_length
@@ -265,9 +265,9 @@ dest_data_set
key_offset
The key offset to use when creating a KSDS data set.
- *key_offset* is required when *type=KSDS*.
+ *key_offset* is required when *type=ksds*.
- *key_offset* should only be provided when *type=KSDS*
+ *key_offset* should only be provided when *type=ksds*.
| **required**: False
| **type**: int
@@ -276,9 +276,9 @@ dest_data_set
key_length
The key length to use when creating a KSDS data set.
- *key_length* is required when *type=KSDS*.
+ *key_length* is required when *type=ksds*.
- *key_length* should only be provided when *type=KSDS*
+ *key_length* should only be provided when *type=ksds*.
| **required**: False
| **type**: int
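+
+ A sketch combining these ``dest_data_set`` attributes during an unarchive; the source archive name and space values are illustrative, and ``terse`` is an assumed format:
+
+ .. code-block:: yaml+jinja
+
+    - name: Unarchive into data sets created with explicit attributes
+      zos_unarchive:
+        src: "USER.ARCHIVE.RESULT.TRS"
+        format:
+          name: terse
+        dest_data_set:
+          type: seq
+          space_primary: 5
+          space_secondary: 1
+          space_type: m
+          record_format: fb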
@@ -392,8 +392,8 @@ Examples
format:
name: xmit
format_options:
- use_adrdssu: True
- list: True
+ use_adrdssu: true
+ list: true
diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst
index 195435924..5647ad998 100644
--- a/docs/source/modules/zos_volume_init.rst
+++ b/docs/source/modules/zos_volume_init.rst
@@ -159,14 +159,14 @@ Examples
zos_volume_init:
address: "1234"
volid: "DEMO01"
- sms_managed: no
+ sms_managed: false
- name: Initialize non-SMS managed target volume with all the default options and
override the default high level qualifier (HLQ).
zos_volume_init:
address: 1234
volid: DEMO01
- sms_managed: no
+ sms_managed: false
tmp_hlq: TESTUSR
- name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as
@@ -175,12 +175,12 @@ Examples
zos_volume_init:
address: e8d8
vtoc_size: 30
- index: yes
- sms_managed: yes
+ index: true
+ sms_managed: true
volid: ine8d8
verify_volid: ine8d8
- verify_volume_empty: yes
- verify_offline: no
+ verify_volume_empty: true
+ verify_offline: false
- name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03'
using Ansible loops.
diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst
index 0b742e816..b2634ab55 100644
--- a/docs/source/plugins.rst
+++ b/docs/source/plugins.rst
@@ -19,9 +19,9 @@ user action is required, this documentation is reference only.
-* `zos_copy`_: Used to copy data from the controller to the z/OS manage node.
+* `zos_copy`_: Used to copy data from the controller to the z/OS managed node.
* `zos_fetch`_: Used to fetch data from the z/OS managed node to the controller.
-* `zos_job_submit`_: Used to submit a job from the controller to the z/OS manage node.
+* `zos_job_submit`_: Used to submit a job from the controller to the z/OS managed node.
-* `zos_ping`: Used to transfer the modules REXX source to the z/OS managed node.
-* `zos_script`: Used to transfer scripts from the controller to the z/OS manage node.
-* `_zos_unarchive`: Used to transfer archives from the controller to the z/OS manage node.
+* `zos_ping`_: Used to transfer the module's REXX source to the z/OS managed node.
+* `zos_script`_: Used to transfer scripts from the controller to the z/OS managed node.
+* `zos_unarchive`_: Used to transfer archives from the controller to the z/OS managed node.
.. _zos_copy:
modules/zos_copy.html
@@ -35,3 +35,4 @@ user action is required, this documentation is reference only.
modules/zos_script.html
.. _zos_unarchive:
modules/zos_unarchive.html
+
diff --git a/docs/source/reference/community.rst b/docs/source/reference/community.rst
new file mode 100644
index 000000000..9c09aeeab
--- /dev/null
+++ b/docs/source/reference/community.rst
@@ -0,0 +1,17 @@
+.. ...........................................................................
+.. © Copyright IBM Corporation 2020, 2021 .
+.. ...........................................................................
+
+============
+Contributing
+============
+
+Contributing to collections as a member of the open source community varies for
+each collection. Although the collections come together as a unified solution,
+each offering operates on its own; therefore, review the individual collections to learn
+how to contribute.
+
+.. toctree::
+ :maxdepth: 1
+
+ z/OS core
\ No newline at end of file
diff --git a/docs/source/reference/documentation.rst b/docs/source/reference/documentation.rst
new file mode 100644
index 000000000..9e16806b3
--- /dev/null
+++ b/docs/source/reference/documentation.rst
@@ -0,0 +1,18 @@
+.. ...........................................................................
+.. © Copyright IBM Corporation 2024 .
+.. ...........................................................................
+
+=============
+Documentation
+=============
+
+In addition to the common reference material included in Helpful Links,
+each collection in the **Red Hat® Ansible Certified Content for IBM Z**
+includes supplementary documentation specific to the collection. Examples of
+such documentation include Web Services APIs, guidelines for developing and
+testing the modules, offering-specific reading, etc.
+
+.. toctree::
+ :maxdepth: 1
+
+ z/OS core <../resources/resources>
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst
index 7c2c3a929..87d707181 100644
--- a/docs/source/release_notes.rst
+++ b/docs/source/release_notes.rst
@@ -1,11 +1,109 @@
.. ...........................................................................
-.. © Copyright IBM Corporation 2020, 2024 .
+.. © Copyright IBM Corporation 2020, 2024 .
.. ...........................................................................
========
Releases
========
+Version 1.10.0-beta.1
+=====================
+
+Major Changes
+-------------
+
+- Starting with IBM Ansible z/OS core version 1.10.x, ZOAU version 1.3.0 will be required.
+- Starting with IBM Ansible z/OS core version 1.10.x, all module options are case sensitive;
+ review the porting guide for specifics.
+- The README has been updated with a new template.
+- The **Reference** section has been renamed to **Requirements** and now includes a support matrix.
+
+Minor Changes
+-------------
+
+- ``zos_apf`` - Enhanced error messages when an exception is caught.
+- ``zos_backup_restore`` - Added option **tmp_hlq** to override the default high level qualifier (HLQ) for temporary and backup data sets.
+- ``zos_copy`` - Documented module options `group` and `owner`.
+
+Bugfixes
+--------
+
+- ``zos_apf`` - Option **list** previously returned only one data set; now it returns a list of retrieved data sets.
+- ``zos_blockinfile`` - Option **block** containing double double quotation marks resulted in a task failure (failed=True); now the module handles this case to avoid failure.
+- ``zos_find`` - Option **size** failed if a PDS/E matched the pattern; now filtering on utilized size for a PDS/E is supported.
+
+- ``zos_job_submit``
+
+  - Did not default to **location=DATA_SET** when no location was defined; now the location defaults to DATA_SET.
+  - Option **max_rc** previously did not influence a module's status; now the option value influences the task's failure status.
+
+- ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored; now the option honors the high level qualifier (HLQ) for temporary data sets created during the module execution.
+
+Porting Guide
+-------------
+
+This section discusses the behavioral changes between ``ibm_zos_core`` v1.9.0 and ``ibm_zos_core`` v1.10.0-beta.1.
+It is intended to assist in updating your playbooks so this collection will continue to work; a short before-and-after sketch follows the list below.
+
+- ``zos_archive``
+
+  - option **terse_pack** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **type** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+
+- ``zos_backup_restore`` - option **space_type** no longer accepts uppercase choices; users should replace them with lowercase ones.
+
+- ``zos_copy``
+
+  - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **type** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+
+- ``zos_data_set``
+
+  - option **record_format** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - option **space_type** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - option **type** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - options inside **batch** no longer accept uppercase choices; users should replace them with lowercase ones.
+
+- ``zos_job_submit`` - option **location** no longer accepts uppercase choices; users should replace them with lowercase ones.
+
+- ``zos_mount``
+
+  - option **automove** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - option **fs_type** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - option **mount_opts** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - option **tag_untagged** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - option **unmount_opts** no longer accepts uppercase choices; users should replace them with lowercase ones.
+
+- ``zos_mvs_raw``
+
+  - options inside **dd_concat** no longer accept uppercase choices; users should replace them with lowercase ones.
+  - suboption **record_format** of **dd_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **record_format** of **dd_unix** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **space_type** of **dd_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **type** of **dd_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboptions **disposition_normal** and **disposition_abnormal** of **dd_data_set** no longer accept **catlg** and **uncatlg** as choices. This also applies when defining a **dd_data_set** inside **dd_concat**.
+
+- ``zos_unarchive``
+
+  - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+  - suboption **type** of **dest_data_set** no longer accepts uppercase choices; users should replace them with lowercase ones.
+
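+As a concrete before-and-after sketch of the porting effort (the task and data set name are illustrative):
+
+.. code-block:: yaml+jinja
+
+   # Accepted through v1.9.0 (uppercase choices)
+   - zos_data_set:
+       name: someds.name.here
+       type: PDS
+       space_type: M
+       record_format: FB
+
+   # Required from v1.10.0-beta.1 (lowercase choices)
+   - zos_data_set:
+       name: someds.name.here
+       type: pds
+       space_type: m
+       record_format: fb
+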
+Availability
+------------
+
+* `Galaxy`_
+* `GitHub`_
+
+Requirements
+------------
+
+The IBM z/OS core collection has several dependencies; please review the `z/OS core support matrix`_ to understand both the
+controller and z/OS managed node dependencies.
+
Version 1.9.0
=============
@@ -125,13 +223,11 @@ Availability
* `Galaxy`_
* `GitHub`_
-Reference
----------
+Requirements
+------------
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_
-* Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3.
+The IBM z/OS core collection has several dependencies; please review the `z/OS core support matrix`_ to understand both the
+controller and z/OS managed node dependencies.
Version 1.8.0
=============
@@ -211,13 +307,11 @@ Availability
* `Galaxy`_
* `GitHub`_
-Reference
----------
+Requirements
+------------
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_
-* Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3.
+The IBM z/OS core collection has several dependencies; please review the `z/OS core support matrix`_ to understand both the
+controller and z/OS managed node dependencies.
Version 1.7.0
=============
@@ -278,13 +372,11 @@ Availability
* `Galaxy`_
* `GitHub`_
-Reference
----------
+Requirements
+------------
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_
-* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3.
+The IBM z/OS core collection has several dependencies; please review the `z/OS core support matrix`_ to understand both the
+controller and z/OS managed node dependencies.
Version 1.6.0
=============
@@ -341,13 +433,11 @@ Availability
* `Galaxy`_
* `GitHub`_
-Reference
----------
+Requirements
+------------
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_
-* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3.
+The IBM z/OS core collection has several dependencies; please review the `z/OS core support matrix`_ to understand both the
+controller and z/OS managed node dependencies.
Version 1.5.0
=============
@@ -457,562 +547,11 @@ Availability
* `Galaxy`_
* `GitHub`_
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_
-* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3.
-
-Version 1.4.1
-=============
-
-Bug fixes
----------
-
-* ``zos_copy``
-
- * Copy failed from a loadlib member to another loadlib member. Fix
- now looks for error in stdout in the if statement to use -X option.
- * Fixes a bug where files not encoded in IBM-1047 would trigger an
- error while computing the record length for a new destination dataset.
- * Fixes a bug where the code for fixing an issue with newlines in
- files.
- * fixed wrongful creation of destination backups when module option
- `force` is true, creating emergency backups meant to restore the system to
- its initial state in case of a module failure only when force is false.
- * fixes a bug where the computed record length for a new destination
- dataset would include newline characters.
-
-* ``zos_job_query``
-
- * fixes a bug where a boolean was not being properly compared.
-
-Availability
-------------
-
-* `Automation Hub`_
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Version 1.4.0
-=============
-
-* Modules
-
- * ``zos_mount`` can manage mount operations for a
- z/OS UNIX System Services (USS) file system data set.
-
-* Plugins
-
- * ``zos_ssh`` connection plugin has been removed from this release and is no
- longer a dependency for the ``zos_ping`` module.
-
-* Bug fixes and enhancements
-
- * Modules
-
- * ``zos_copy``
-
- * introduced an updated creation policy referred to as precedence rules
- that if `dest_data_set` is set, it will take precedence. If
- `dest` is an empty data set, the empty data set will be written with the
- expectation its attributes satisfy the copy. If no precedent rule
- has been exercised, `dest` will be created with the same attributes of
- `src`.
- * introduced new computation capabilities that if `dest` is a nonexistent
- data set, the attributes assigned will depend on the type of `src`. If
- `src` is a USS file, `dest` will have a Fixed Block (FB) record format
- and the remaining attributes will be computed. If `src` is binary,
- `dest` will have a Fixed Block (FB) record format with a record length
- of 80, block size of 32760, and the remaining attributes will be
- computed.
- * enhanced the force option when `force=true` and the remote file or
- data set `dest`` is NOT empty, the `dest` will be deleted and recreated
- with the `src` data set attributes, otherwise it will be recreated with
- the `dest` data set attributes.
- * was enhanced for when `src` is a directory and ends with "/",
- the contents of it will be copied into the root of `dest`. It it doesn't
- end with "/", the directory itself will be copied.
- * option `dest_dataset` has been deprecated and removed in favor
- of the new option `dest_data_set`.
- * fixes a bug that when a directory is copied from the controller to the
- managed node and a mode is set, the mode is applied to the directory
- on the managed node. If the directory being copied contains files and
- mode is set, mode will only be applied to the files being copied not the
- pre-existing files.
- * fixes a bug that did not create a data set on the specified volume.
- * fixes a bug where a number of attributes were not an option when using
- `dest_data_set`.
- * fixes a bug where options were not defined in the module
- argument spec that will result in error when running `ansible-core`
- v2.11 and using options `force` or `mode`.
- * was enhanced to support the ``ansible.builtin.ssh`` connection options;
- for further reference refer to the `SSH plugin`_ documentation.
- * was enhanced to take into account the record length when the
- source is a USS file and the destination is a data set with a record
- length. This is done by inspecting the destination data set attributes
- and using these attributes to create a new data set.
- * was updated with the capabilities to define destination data sets from
- within the ``zos_copy`` module. In the case where you are copying to
- data set destination that does not exist, you can now do so using the
- new ``zos_copy`` module option ``destination_dataset``.
-
- * ``zos_operator``
-
- * enhanced to allow for MVS operator `SET` command, `SET` is
- equivalent to the abbreviated `T` command.
-
- * ``zos_mount`` fixed option `tag_ccsid` to correctly allow for type int.
-
- * ``module_utils``
-
- * jobs.py - fixes a utility used by module `zos_job_output` that would
- truncate the DD content.
-
- * ``zos_ping`` was enhanced to remove the need for the ``zos_ssh``
- connection plugin dependency.
-
- * ``zos_fetch`` was enhanced to support the ``ansible.builtin.ssh``
- connection options; for further reference refer to the
- `SSH plugin`_ documentation.
-
- * ``zos_job_output``
-
- * was updated to correct possible truncated responses for
- the **ddname** content. This would occur for jobs with very large amounts
- of content from a **ddname**.
- * was enhanced to to include the completion code (CC) for each individual
- jop step as part of the ``ret_code`` response.
-
- * ``zos_job_query``
-
- * was enhanced to support a 7 digit job number ID for when there are
- greater than 99,999 jobs in the history.
- * was enhanced to handle when an invalid job ID or job name is used with
- the module and returns a proper response.
-
- * ``zos_job_submit``
-
- * was enhanced to fail fast when a submitted job fails instead of waiting
- a predetermined time.
- * was enhanced to check for 'JCL ERROR' when jobs are submitted and result
- in a proper module response.
-
- * ``zos_operator_action_query`` response messages were improved with more
- diagnostic information in the event an error is encountered.
-
-* Deprecated or removed
-
- * ``zos_copy`` module option **destination_dataset** has been renamed to
- **dest_data_set**.
- * ``zos_ssh`` connection plugin has been removed, it is no longer required.
- Remove all playbook references, ie ``connection: ibm.ibm_zos_core.zos_ssh``.
- * ``zos_ssh`` connection plugin has been removed, it is no longer required.
- You must remove the zos_ssh connection plugin from all playbooks that
- reference the plugin, for example connection: ibm.ibm_zos_core.zos_ssh.
- * ``zos_copy`` module option **model_ds** has been removed. The model_ds logic
- is now automatically managed and data sets are either created based on the
- ``src`` data set or overridden by the new option ``destination_dataset``.
- * ``zos_copy`` and ``zos_fetch`` option **sftp_port** has been deprecated. To
- set the SFTP port, use the supported options in the ``ansible.builtin.ssh``
- plugin. Refer to the `SSH port`_ option to configure the port used during
- the modules SFTP transport.
-
-* Documentation
-
- * Noteworthy documentation updates have been made to:
-
- * ``zos_copy`` and ``zos_fetch`` about Co:Z SFTP support.
- * ``zos_mvs_raw`` removed a duplicate example.
- * all action plugins are documented
- * update hyperlinks embedded in documentation.
- * ``zos_operator`` to explains how to use single quotes in operator commands.
-
-Availability
-------------
-
-* `Automation Hub`_
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Known Issues
-------------
-
-* If a playbook includes the deprecated ``zos_ssh`` connection plugin, for
- example ``connection: ibm.ibm_zos_core.zos_ssh``, it will
- encounter this error which can corrected by safely removing the plugin:
-
- .. code-block::
-
- "msg": "the connection plugin 'ibm.ibm_zos_core.zos_ssh' was not found"
-
-* When using the ``zos_ssh`` plugin with **Ansible 2.11** and earlier versions
- of this collection, you will encounter the exception:
-
- .. code-block::
-
- AttributeError: module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'.
-
- This is resolved in this release by deprecating the ``zos_ssh`` connection
- plugin and removing all ``connection: ibm.ibm_zos_core.zos_ssh`` references
- from playbooks.
-* When using module ``zos_copy`` and option ``force`` with ansible versions
- greater than **Ansbile 2.10** and earlier versions of this collection, an
- unsupported option exception would occur. This is resolved in this release.
-* When using the ``zos_copy`` or ``zos_fetch`` modules in earlier versions of
- this collection without 'passwordless' SSH configured such that you are using
- ``--ask-pass`` or passing an ``ansible_password`` in a configuration; during
- the playbook execution a second password prompt for SFTP would appear pausing
- the playbook execution. This is resolved in this release.
-* When using the ``zos_copy`` or ``zos_fetch`` modules, if you tried to use
- Ansible connection options such as ``host_key_checking`` or ``port``, they
- were not included as part of the modules execution. This is resolved in this
- release by ensuring compatibility with the ``ansible.builtin.ssh`` plugin
- options. Refer to the `SSH plugin`_ documentation to enable supported options.
-* Known issues for modules can be found in the **Notes** section of a modules
- documentation.
-
-
-Deprecation Notices
--------------------
-Features and functions are marked as deprecated when they are enhanced and an
-alternative is available. In most cases, the deprecated item will remain
-available unless the deprecated function interferes with the offering.
-Deprecated functions are no longer supported, and will be removed in a future
-release.
-
-.. _SSH plugin:
- https://docs.ansible.com/ansible/latest/collections/ansible/builtin/ssh_connection.html
-
-.. _SSH port:
- https://docs.ansible.com/ansible/latest/collections/ansible/builtin/ssh_connection.html#parameter-port
-
-Version 1.3.6
-=============
-
-What's New
-----------
-
-* Bug Fixes
-
- * Modules
-
- * ``zos_copy`` fixes a bug that when a directory is copied from the
- controller to the managed node and a mode is set, the mode is now applied
- to the directory on the controller. If the directory being copied contains
- files and mode is set, mode will only be applied to the files being copied
- not the pre-existing files.
- * ``zos_copy`` - fixes a bug where options were not defined in the module
- argument spec that will result in error when running `ansible-core` v2.11
- and using options `force` or `mode`.
- * ``zos_copy`` - was enhanced for when `src` is a directory and ends with "/",
- the contents of it will be copied into the root of `dest`. It it doesn't
- end with "/", the directory itself will be copied.
- * ``zos_fetch`` - fixes a bug where an option was not defined in the module
- argument spec that will result in error when running `ansible-core` v2.11
- and using option `encoding`.
- * ``zos_job_submit`` - fixes a bug where an option was not defined in the
- module argument spec that will result in error when running
- `ansible-core` v2.11 and using option `encoding`.
- * ``jobs.py`` - fixes a utility used by module `zos_job_output` that would
- truncate the DD content.
- * ``zos_ssh`` connection plugin was updated to correct a bug that causes
- an `ANSIBLE_SSH_CONTROL_PATH_DIR` attribute error only when using
- ansible-core v2.11.
-
-Availability
-------------
-
-* `Automation Hub`_
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 -
- `IBM Open Enterprise SDK for Python`_ v3.9.5
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Version 1.3.5
-=============
-
-What's New
-----------
-
-* Bug Fixes
-
- * Modules
-
- * ``zos_ssh`` connection plugin was updated to correct a bug in Ansible that
- would result in playbook task ``retries`` overriding the SSH connection
- ``retries``. This is resolved by renaming the ``zos_ssh`` option
- ``retries`` to ``reconnection_retries``. The update addresses users of
- ``ansible-core`` v2.9 which continues to use ``retries`` and users of
- ``ansible-core`` v2.11 or later which uses ``reconnection_retries``. This
- also resolves a bug in the connection that referenced a deprecated
- constant.
- * ``zos_job_output`` fixes a bug that returned all ddnames when a specific
- ddname was provided. Now a specific ddname can be returned and all others
- ignored.
- * ``zos_copy`` fixes a bug that would not copy subdirectories. If the source
- is a directory with subdirectories, all subdirectories will now be copied.
-
-Availability
+Requirements
------------
-* `Automation Hub`_
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Version 1.3.3
-=============
-
-What's New
-----------
-
-* Bug Fixes
-
- * Modules
-
- * ``zos_copy`` was updated to correct the deletion of temporary files and
- prevent unwarranted deletes.
-
- * When the module would complete, a cleanup routine did not take into
- account that other processes had open temporary files and thus would
- fail when trying to remove them.
- * When the module would copy a directory (source) from USS to another
- USS directory (destination), any files currently in the destination
- would be deleted.
- The module's behavior has changed such that files are no longer deleted
- unless the ``force`` option is set to ``true``. When ``force=true``,
- copying files or a directory to a USS destination will continue if it
- encounters existing files or directories and overwrite any
- corresponding files (see the sketch after this list).
- * ``zos_job_query`` was updated to correct a boolean condition that always
- evaluated to "CANCELLED".
-
- * When querying jobs that are either **CANCELLED** or have **FAILED**,
- they were always treated as **CANCELLED**.
-
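A minimal sketch of the corrected ``force`` behavior described above, assuming plain USS files and a hypothetical helper name; the real module also handles data sets and many more cases.

.. code-block:: python

    import os
    import shutil

    def copy_into_uss_dir(src_dir, dest_dir, force=False):
        """Sketch: pre-existing files in dest_dir are never deleted; with
        force=True, colliding files are overwritten and all others are kept.
        """
        for name in os.listdir(src_dir):
            target = os.path.join(dest_dir, name)
            if os.path.exists(target) and not force:
                raise FileExistsError(
                    "{0} exists and force=false".format(target))
            shutil.copy2(os.path.join(src_dir, name), target)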
-Availability
-------------
-
-* `Automation Hub`_
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Version 1.3.1
-=============
-
-What's New
-----------
-
-* Bug Fixes
-
- * Modules
-
- * Connection plugin ``zos_ssh`` was updated to prioritize the execution of
- modules written in REXX over other implementations, as is the case for
- ``zos_ping``.
- * ``zos_ping`` was updated to support Automation Hub documentation
- generation.
-
-Availability
-------------
-
-* `Automation Hub`_
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Known issues
-------------
-
-* Modules
-
- * When executing programs using ``zos_mvs_raw``, you may encounter errors
- that originate in the implementation of the programs. Two such known issues
- are noted below, one of which has been addressed with an APAR.
-
- #. ``zos_mvs_raw`` module execution fails when invoking
- Database Image Copy 2 Utility or Database Recovery Utility in conjunction
- with FlashCopy or Fast Replication.
- #. ``zos_mvs_raw`` module execution fails when invoking DFSRRC00 with parm
- "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is
- addressed by APAR PH28089.
-
-Version 1.3.0
-=============
-
-What's New
-----------
-
-* Modules
-
- * ``zos_apf`` - Add or remove libraries to and from Authorized Program Facility (APF).
- * ``zos_backup_restore`` - Backup and restore data sets and volumes.
- * ``zos_blockinfile`` - Manage block of multi-line textual data on z/OS.
- * ``zos_find`` - Find matching data sets.
- * ``zos_data_set`` - added support to allocate and format zFS data sets.
- * ``zos_operator`` - supports new options **wait** and **wait_time_s** such
- that you can specify that ``zos_operator`` wait the full **wait_time_s** or
- return as soon as the first operator command executes.
- * All modules support relative paths, and option choices are no longer case sensitive.
-
-* Bug Fixes
-
- * Modules
-
- * Action plugin ``zos_copy`` was updated to support Python 2.7.
- * Module ``zos_copy`` was updated to fail gracefully when it
- encounters a non-zero return code.
- * Module ``zos_copy`` was updated to support copying data set members that
- are program objects to a PDSE. Prior to this update, copying data set
- members would yield an error:
- **FSUM8976 Error writing to PDSE member**
- * Job utility is an internal library used by several modules. It has been
- updated to use a custom-written parsing routine capable of handling
- special characters, preventing job-related reading operations from failing
- when a special character is encountered.
- * Module ``zos_job_submit`` was updated to remove all trailing **\r** from
- jobs that are submitted from the controller.
- * Module ``zos_job_submit`` referenced a non-existent option and was
- corrected to **wait_time_s**.
- * Support was added to module ``zos_tso_command`` for command output that
- contains special characters.
-
- * Playbooks
-
- * Playbook `zos_operator_basics.yaml`_
- has been updated to use `end` in the WTO reply instead of the previously
- used `cancel`. Using `cancel` is not a valid reply and results in an execution
- error.
-
-* Playbooks
-
- * In each release, we continue to expand on use cases and deliver them as
- playbooks in the `playbook repository`_ that can be easily tailored to any
- system.
-
- * Authorize and
- `synchronize APF authorized libraries on z/OS from a configuration file cloned from GitHub`_
- * Automate program execution with
- `copy, sort and fetch data sets on z/OS playbook`_.
- * Automate user management with add, remove, grant permission,
- generate passwords, create zFS, mount zFS and send email
- notifications when deployed to Ansible Tower or AWX with the
- `manage z/OS Users Using Ansible`_ playbook.
- * Use the `configure Python and ZOAU Installation`_ playbook to scan the
- **z/OS** target to find the latest supported configuration and generate
- `inventory`_ and a `variables`_ configuration.
- * Automate software management with `SMP/E Playbooks`_
- * All playbooks have been updated to use our temporary data set feature
- to avoid any concurrent data set name problems.
- * In the prior release, all sample playbooks previously included with the
- collection were migrated to the `playbook repository`_. The
- `playbook repository`_ categorizes playbooks into **z/OS concepts** and
- **topics**; it also covers `playbook configuration`_ and provides
- additional community content such as **blogs** and where to open
- `support tickets`_ for the playbooks.
-
-* Documentation
-
- * All documentation related to `playbook configuration`_ has been
- migrated to the `playbook repository`_. Each playbook contains a README
- that explains what configurations must be made to run a sample playbook.
- * We have been carefully reviewing our users' feedback and, over time, have
- compiled a list of information that we feel will help everyone; we have
- released this information in our new `FAQs`_.
- * Learn about the latest features through the blogs that discuss playbooks,
- modules, and use cases:
-
- * `Running Batch Jobs on z/OS using Ansible`_ details how
- to write and execute batch jobs without having to deal with JCL.
-
- * `z/OS User Management With Ansible`_ explains all about the user management
- playbook and its optional integration into AWX.
-
-Availability
-------------
-
-* `Galaxy`_
-* `GitHub`_
-
-Reference
----------
-
-* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1
-* Supported by the `z/OS® shell`_
-* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later
-* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and
- `Z Open Automation Utilities 1.1.1`_
-
-Known issues
-------------
-
-* Modules
-
- * When executing programs using ``zos_mvs_raw``, you may encounter errors
- that originate in the implementation of the programs. Two such known issues
- are noted below, one of which has been addressed with an APAR.
-
- #. ``zos_mvs_raw`` module execution fails when invoking
- Database Image Copy 2 Utility or Database Recovery Utility in conjunction
- with FlashCopy or Fast Replication.
- #. ``zos_mvs_raw`` module execution fails when invoking DFSRRC00 with parm
- "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is
- addressed by APAR PH28089.
+The IBM z/OS core collection has several dependencies; please review the `z/OS core support matrix`_ to understand both the
+controller and z/OS managed node dependencies.
.. .............................................................................
.. Global Links
@@ -1035,6 +574,8 @@ Known issues
https://www.ibm.com/docs/en/python-zos/3.11
.. _3.12:
https://www.ibm.com/docs/en/python-zos/3.12
+.. _Z Open Automation Utilities:
+ https://www.ibm.com/docs/en/zoau/latest
.. _Z Open Automation Utilities 1.1.0:
https://www.ibm.com/docs/en/zoau/1.1.x
.. _Z Open Automation Utilities 1.1.1:
@@ -1047,6 +588,8 @@ Known issues
https://www.ibm.com/docs/en/zoau/1.2.x
.. _Z Open Automation Utilities 1.2.5:
https://www.ibm.com/docs/en/zoau/1.2.x
+.. _Z Open Automation Utilities 1.3.0:
+ https://www.ibm.com/docs/en/zoau/1.3.x
.. _z/OS® shell:
https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm
.. _z/OS®:
@@ -1059,6 +602,8 @@ Known issues
https://www.ibm.com/docs/en/zos
.. _FAQs:
https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html
+.. _z/OS core support matrix:
+ https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html
.. .............................................................................
.. Playbook Links
diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst
index 89419777d..a230b9af6 100644
--- a/docs/source/requirements-single.rst
+++ b/docs/source/requirements-single.rst
@@ -2,7 +2,7 @@
.. Auto generated restructured text .
.. ...........................................................................
.. ...........................................................................
-.. © Copyright IBM Corporation 2020 .
+.. © Copyright IBM Corporation 2024 .
.. ...........................................................................
============
diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst
new file mode 100644
index 000000000..acb0e6559
--- /dev/null
+++ b/docs/source/resources/releases_maintenance.rst
@@ -0,0 +1,98 @@
+.. ...........................................................................
+.. © Copyright IBM Corporation 2024 .
+.. ...........................................................................
+
+========================
+Releases and maintenance
+========================
+
+This table describes the collection's release dates, dependency versions, and End of Life (EOL) dates.
+
+The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally, each quarter
+a beta is released followed by a GA version. Occasionally, the cycle may be extended to properly implement and
+test larger changes before a new release is made available.
+
+End of Life for this collection generally follows a 2-year cycle, unless a dependency reaches EOL before then.
+For example, if a collection is released and one of its dependencies reaches EOL 1 year later, then the collection
+reaches EOL at the same time as that dependency, 1 year later.
+
+The table lists the component versions that were available when the collection was made generally available (GA).
+The underlying component versions are likely to change as they reach EOL, so each component must be a version that is
+currently supported.
+
+For example, if a collection is released with a minimum version of ``ansible-core`` 2.14.0 (Ansible 7.0) and that
+version later reaches EOL, then a newer supported version of ``ansible-core`` (Ansible) must be selected. When choosing a newer
+``ansible-core`` (Ansible) version, review the `ansible-core support matrix`_ to select the appropriate dependencies.
+This is important because different releases of ``ansible-core`` can require newer controller and managed node
+dependencies, as is the case with Python.
+
+If the controller is Ansible Automation Platform (AAP), review the `Red Hat Ansible Automation Platform Life Cycle`_
+to select a supported AAP version.
+
+For IBM product lifecycle information, you can search for products using a product name, version or ID. For example,
+to view IBM's **Open Enterprise SDK for Python** lifecycle, search on product ID `5655-PYT`_, and for **Z Open Automation Utilities**,
+search on product ID `5698-PA1`_.
+
+Support Matrix
+==============
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+| Version | Controller | Managed Node | GA | End of Life |
++=========+============================+===================================================+===============+===============+
+| 1.10.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD |
+| |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | |
+| |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | |
+| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | |
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+| 1.9.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 05 Feb 2024 | 30 April 2025 |
+| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | |
+| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | |
+| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | |
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+| 1.8.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 |
+| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | |
+| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | |
+| | |- IBM `Z Open Automation Utilities`_ 1.2.4 - 1.2.x | | |
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+| 1.7.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 |
+| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | |
+| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | |
+| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | |
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+| 1.6.x |- `ansible-core`_ >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 |
+| |- `Ansible`_ >=2.9.x |- `z/OS shell`_ | | |
+| |- `AAP`_ >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | |
+| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | |
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+| 1.5.x |- `ansible-core`_ >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 |
+| |- `Ansible`_ >=2.9.x |- `z/OS shell`_ | | |
+| |- `AAP`_ >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | |
+| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | |
++---------+----------------------------+---------------------------------------------------+---------------+---------------+
+
+.. .............................................................................
+.. Global Links
+.. .............................................................................
+.. _ansible-core support matrix:
+ https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix
+.. _AAP:
+ https://access.redhat.com/support/policy/updates/ansible-automation-platform
+.. _Red Hat Ansible Automation Platform Life Cycle:
+ https://access.redhat.com/support/policy/updates/ansible-automation-platform
+.. _Automation Hub:
+ https://www.ansible.com/products/automation-hub
+.. _Open Enterprise SDK for Python:
+ https://www.ibm.com/products/open-enterprise-python-zos
+.. _Z Open Automation Utilities:
+ https://www.ibm.com/docs/en/zoau/latest
+.. _z/OS shell:
+ https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm
+.. _z/OS:
+ https://www.ibm.com/docs/en/zos
+.. _5655-PYT:
+ https://www.ibm.com/support/pages/lifecycle/search?q=5655-PYT
+.. _5698-PA1:
+ https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1
+.. _ansible-core:
+ https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix
+.. _Ansible:
+ https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix
\ No newline at end of file
diff --git a/docs/source/resources/resources.rst b/docs/source/resources/resources.rst
index 8b5951948..8bdb16a6c 100644
--- a/docs/source/resources/resources.rst
+++ b/docs/source/resources/resources.rst
@@ -1,5 +1,5 @@
.. ...........................................................................
-.. © Copyright IBM Corporation 2020, 2021 .
+.. © Copyright IBM Corporation 2024 .
.. ...........................................................................
=========
@@ -10,3 +10,4 @@ Resources
:maxdepth: 1
character_set
+ releases_maintenance
diff --git a/galaxy.yml b/galaxy.yml
index e273940f7..c408424aa 100644
--- a/galaxy.yml
+++ b/galaxy.yml
@@ -6,7 +6,7 @@ namespace: ibm
name: ibm_zos_core
# The collection version
-version: 1.9.0
+version: 1.10.0-beta.1
# Collection README file
readme: README.md
diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml
index 1e920da6b..9b4dfde5e 100644
--- a/meta/ibm_zos_core_meta.yml
+++ b/meta/ibm_zos_core_meta.yml
@@ -1,5 +1,5 @@
name: ibm_zos_core
-version: "1.9.0"
+version: "1.10.0-beta.1"
managed_requirements:
-
name: "IBM Open Enterprise SDK for Python"
@@ -7,4 +7,4 @@ managed_requirements:
-
name: "Z Open Automation Utilities"
version:
- - "1.2.5"
+ - ">=1.3.0"
diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py
index 592126b00..90d49874a 100644
--- a/plugins/action/zos_copy.py
+++ b/plugins/action/zos_copy.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2019-2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -29,11 +29,12 @@
from ansible import cli
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import (
- is_member,
- is_data_set
+ is_member
)
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template
@@ -69,8 +70,8 @@ def run(self, tmp=None, task_vars=None):
owner = task_args.get("owner", None)
group = task_args.get("group", None)
- is_pds = is_src_dir = False
- temp_path = is_uss = is_mvs_dest = src_member = None
+ is_src_dir = False
+ temp_path = is_uss = None
if dest:
if not isinstance(dest, string_types):
@@ -78,7 +79,6 @@ def run(self, tmp=None, task_vars=None):
return self._exit_action(result, msg, failed=True)
else:
is_uss = "/" in dest
- is_mvs_dest = is_data_set(dest)
else:
msg = "Destination is required"
return self._exit_action(result, msg, failed=True)
@@ -96,13 +96,11 @@ def run(self, tmp=None, task_vars=None):
msg = "'src' or 'dest' must not be empty"
return self._exit_action(result, msg, failed=True)
else:
- src_member = is_member(src)
if not remote_src:
if src.startswith('~'):
src = os.path.expanduser(src)
src = os.path.realpath(src)
is_src_dir = os.path.isdir(src)
- is_pds = is_src_dir and is_mvs_dest
if not src and not content:
msg = "'src' or 'content' is required"
@@ -196,11 +194,6 @@ def run(self, tmp=None, task_vars=None):
src = rendered_dir
- task_args["size"] = sum(
- os.stat(os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(f))).st_size
- for path, dirs, files in os.walk(src)
- for f in files
- )
else:
if mode == "preserve":
task_args["mode"] = "0{0:o}".format(
@@ -231,7 +224,6 @@ def run(self, tmp=None, task_vars=None):
src = rendered_file
- task_args["size"] = os.stat(src).st_size
display.vvv(u"ibm_zos_copy calculated size: {0}".format(os.stat(src).st_size), host=self._play_context.remote_addr)
transfer_res = self._copy_to_remote(
src, is_dir=is_src_dir, ignore_stderr=ignore_sftp_stderr
@@ -242,15 +234,31 @@ def run(self, tmp=None, task_vars=None):
return transfer_res
display.vvv(u"ibm_zos_copy temp path: {0}".format(transfer_res.get("temp_path")), host=self._play_context.remote_addr)
+ if not encoding:
+ encoding = {
+ "from": encode.Defaults.get_default_system_charset(),
+ }
+
+ """
+ We format temp_path correctly to pass it as the src option to the module.
+ We keep the original source to return to the user, avoiding the confusion
+ of returning the created temp_path.
+ """
+ original_src = task_args.get("src")
+ if original_src:
+ if not remote_src:
+ os.path.basename(original_src)
+ if original_src.endswith("/"):
+ src = temp_path + "/"
+ else:
+ src = temp_path
+ else:
+ src = temp_path
+
task_args.update(
dict(
- is_uss=is_uss,
- is_pds=is_pds,
- is_src_dir=is_src_dir,
- src_member=src_member,
- temp_path=temp_path,
- is_mvs_dest=is_mvs_dest,
- local_charset=encode.Defaults.get_default_system_charset()
+ src=src,
+ encoding=encoding,
)
)
copy_res = self._execute_module(
@@ -284,17 +292,20 @@ def run(self, tmp=None, task_vars=None):
self._remote_cleanup(dest, copy_res.get("dest_exists"), task_vars)
return result
- return _update_result(is_binary, copy_res, self._task.args)
+ return _update_result(is_binary, copy_res, self._task.args, original_src)
def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False):
"""Copy a file or directory to the remote z/OS system """
- temp_path = "/{0}/{1}".format(gettempprefix(), _create_temp_path_name())
+ temp_path = "/{0}/{1}/{2}".format(gettempprefix(), _create_temp_path_name(), os.path.basename(src))
+ self._connection.exec_command("mkdir -p {0}".format(os.path.dirname(temp_path)))
_src = src.replace("#", "\\#")
_sftp_action = 'put'
+ full_temp_path = temp_path
if is_dir:
src = src.rstrip("/") if src.endswith("/") else src
+ temp_path = os.path.dirname(temp_path)
base = os.path.basename(src)
self._connection.exec_command("mkdir -p {0}/{1}".format(temp_path, base))
_sftp_action += ' -r' # add '-r` to clone the source trees
@@ -379,7 +390,7 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False):
display.vvv(u"ibm_zos_copy SSH transfer method restored to {0}".format(user_ssh_transfer_method), host=self._play_context.remote_addr)
is_ssh_transfer_method_updated = False
- return dict(temp_path=temp_path)
+ return dict(temp_path=full_temp_path)
def _remote_cleanup(self, dest, dest_exists, task_vars):
"""Remove all files or data sets pointed to by 'dest' on the remote
@@ -394,7 +405,7 @@ def _remote_cleanup(self, dest, dest_exists, task_vars):
else:
module_args = dict(name=dest, state="absent")
if is_member(dest):
- module_args["type"] = "MEMBER"
+ module_args["type"] = "member"
self._execute_module(
module_name="ibm.ibm_zos_core.zos_data_set",
module_args=module_args,
@@ -417,7 +428,7 @@ def _exit_action(self, result, msg, failed=False):
return result
-def _update_result(is_binary, copy_res, original_args):
+def _update_result(is_binary, copy_res, original_args, original_src):
""" Helper function to update output result with the provided values """
ds_type = copy_res.get("ds_type")
src = copy_res.get("src")
@@ -431,7 +442,7 @@ def _update_result(is_binary, copy_res, original_args):
invocation=dict(module_args=original_args),
)
if src:
- updated_result["src"] = src
+ updated_result["src"] = original_src
if note:
updated_result["note"] = note
if backup_name:
@@ -457,6 +468,16 @@ def _update_result(is_binary, copy_res, original_args):
updated_result["dest_created"] = True
updated_result["destination_attributes"] = dest_data_set_attrs
+ # Setting attributes to lower case to conform to docs.
+ # Part of the change to lowercase choices in the collection involves having
+ # a consistent interface that also returns the same values in lowercase.
+ if "record_format" in updated_result["destination_attributes"]:
+ updated_result["destination_attributes"]["record_format"] = updated_result["destination_attributes"]["record_format"].lower()
+ if "space_type" in updated_result["destination_attributes"]:
+ updated_result["destination_attributes"]["space_type"] = updated_result["destination_attributes"]["space_type"].lower()
+ if "type" in updated_result["destination_attributes"]:
+ updated_result["destination_attributes"]["type"] = updated_result["destination_attributes"]["type"].lower()
+
return updated_result
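The attribute lowercasing at the end of this hunk is part of the collection-wide move to lowercase option choices. A condensed sketch of that normalization, extracted for clarity; the field names come from the hunk above, while the helper name is illustrative:

.. code-block:: python

    def lowercase_choice_fields(attrs):
        # Normalize choice-valued fields so the module returns the same
        # casing its documentation advertises.
        for key in ("record_format", "space_type", "type"):
            if key in attrs:
                attrs[key] = attrs[key].lower()
        return attrs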
diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py
index 087c70953..56232f34f 100644
--- a/plugins/action/zos_fetch.py
+++ b/plugins/action/zos_fetch.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2019-2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -107,7 +107,7 @@ def run(self, tmp=None, task_vars=None):
src = self._task.args.get('src')
dest = self._task.args.get('dest')
- encoding = self._task.args.get('encoding')
+ encoding = self._task.args.get('encoding', None)
flat = _process_boolean(self._task.args.get('flat'), default=False)
is_binary = _process_boolean(self._task.args.get('is_binary'))
ignore_sftp_stderr = _process_boolean(
@@ -219,9 +219,13 @@ def run(self, tmp=None, task_vars=None):
# Execute module on remote host #
# ********************************************************** #
new_module_args = self._task.args.copy()
- new_module_args.update(
- dict(local_charset=encode.Defaults.get_default_system_charset())
- )
+ encoding_to = None
+ if encoding:
+ encoding_to = encoding.get("to", None)
+ if encoding is None or encoding_to is None:
+ new_module_args.update(
+ dict(encoding=dict(to=encode.Defaults.get_default_system_charset()))
+ )
remote_path = None
try:
fetch_res = self._execute_module(
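The hunk above changes ``zos_fetch`` to inject the controller's default charset only when the user has not supplied ``encoding.to``, mirroring the new fallback in the ``zos_copy`` action plugin. A condensed sketch of the intent; the helper name is an assumption, not part of the plugin:

.. code-block:: python

    def apply_default_encoding(task_args, default_charset):
        # Only inject a default when the user supplied no target charset.
        encoding = task_args.get("encoding") or {}
        if not encoding.get("to"):
            encoding["to"] = default_charset
            task_args["encoding"] = encoding
        return task_args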
diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py
index 6bbd0f9d9..67047b648 100644
--- a/plugins/action/zos_job_submit.py
+++ b/plugins/action/zos_job_submit.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2019, 2020, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -44,15 +44,15 @@ def run(self, tmp=None, task_vars=None):
use_template = _process_boolean(module_args.get("use_template"))
location = module_args.get("location")
- if use_template and location != "LOCAL":
+ if use_template and location != "local":
result.update(dict(
failed=True,
changed=False,
- msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'LOCAL'".format(location)
+ msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'local'".format(location)
))
return result
- if location == "LOCAL":
+ if location == "local":
source = self._task.args.get("src", None)
diff --git a/plugins/action/zos_ping.py b/plugins/action/zos_ping.py
index 9d644d389..b3b2b328a 100644
--- a/plugins/action/zos_ping.py
+++ b/plugins/action/zos_ping.py
@@ -1,6 +1,6 @@
# (c) 2012, Michael DeHaan
# Copyright (c) 2017 Ansible Project
-# Copyright IBM Corporation 2020, 2021, 2022
+# Copyright IBM Corporation 2020, 2022
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py
index 6e679d62d..b0a1fa466 100644
--- a/plugins/action/zos_unarchive.py
+++ b/plugins/action/zos_unarchive.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2023
+# Copyright (c) IBM Corporation 2023, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -87,11 +87,11 @@ def run(self, tmp=None, task_vars=None):
)
dest = cmd_res.get("stdout")
if dest_data_set.get("space_primary") is None:
- dest_data_set.update(space_primary=5, space_type="M")
+ dest_data_set.update(space_primary=5, space_type="m")
if format_name == 'terse':
- dest_data_set.update(type='SEQ', record_format='FB', record_length=1024)
+ dest_data_set.update(type='seq', record_format='fb', record_length=1024)
if format_name == 'xmit':
- dest_data_set.update(type='SEQ', record_format='FB', record_length=80)
+ dest_data_set.update(type='seq', record_format='fb', record_length=80)
copy_module_args.update(
dict(
diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py
index 17b530218..483fbdb73 100644
--- a/plugins/filter/wtor.py
+++ b/plugins/filter/wtor.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2020
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -54,8 +54,8 @@
- name: Evaluate if there are any existing dump messages matching 'IEE094D SPECIFY OPERAND'
assert:
that:
- - is_specify_operand is defined
- - bool_zos_operator_action_continue
+ - is_specify_operand is defined
+ - bool_zos_operator_action_continue
success_msg: "Found 'IEE094D SPECIFY OPERAND' message."
fail_msg: "Did not find 'IEE094D SPECIFY OPERAND' message."
"""
diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py
index 28339d842..46f8669c5 100644
--- a/plugins/module_utils/backup.py
+++ b/plugins/module_utils/backup.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2020, 2022
+# Copyright (c) IBM Corporation 2020 - 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -22,8 +22,9 @@
import time
from shutil import copy2, copytree, rmtree
+import traceback
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
BetterArgParser,
@@ -39,9 +40,10 @@
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import iebcopy
try:
- from zoautil_py import datasets
+ from zoautil_py import datasets, exceptions
except Exception:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
+ exceptions = ZOAUImportError(traceback.format_exc())
if PY3:
from shlex import quote
else:
@@ -76,29 +78,32 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None):
bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name())
bk_dsn = _validate_data_set_name(bk_dsn).upper()
- response = datasets._copy(dsn, bk_dsn)
- if response.rc != 0:
+ try:
+ datasets.copy(dsn, bk_dsn)
+ except exceptions.ZOAUException as copy_exception:
raise BackupError(
"Unable to backup {0} to {1}".format(dsn, bk_dsn),
- rc=response.rc,
- stdout=response.stdout_response,
- stderr=response.stderr_response
+ rc=copy_exception.response.rc,
+ stdout=copy_exception.response.stdout_response,
+ stderr=copy_exception.response.stderr_response
)
else:
if not bk_dsn:
if tmphlq:
hlq = tmphlq
else:
- hlq = datasets.hlq()
- bk_dsn = datasets.tmp_name(hlq)
+ hlq = datasets.get_hlq()
+ bk_dsn = datasets.tmp_name(high_level_qualifier=hlq)
bk_dsn = _validate_data_set_name(bk_dsn).upper()
# In case the backup ds is a member we trust that the PDS attributes are ok to fit the src content.
# This should not delete a PDS just to create a backup member.
# Otherwise, we allocate the appropriate space for the backup ds based on src.
if is_member(bk_dsn):
- cp_response = datasets._copy(dsn, bk_dsn)
- cp_rc = cp_response.rc
+ try:
+ cp_rc = datasets.copy(dsn, bk_dsn)
+ except exceptions.ZOAUException as copy_exception:
+ cp_rc = copy_exception.response.rc
else:
cp_rc = _copy_ds(dsn, bk_dsn)
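This backup.py hunk illustrates the broader migration in this change set: ZOAU 1.2's private ``datasets._copy()`` returned a response whose ``rc`` had to be checked, while ZOAU 1.3's public ``datasets.copy()`` raises ``ZOAUException`` carrying the full response. A minimal sketch of the new pattern, reusing the attribute names visible above (the function name is illustrative):

.. code-block:: python

    from zoautil_py import datasets, exceptions

    def backup_copy(src, dest):
        # ZOAU >= 1.3: failures surface as exceptions that carry the shell
        # response, instead of a return code to inspect manually.
        try:
            datasets.copy(src, dest)
        except exceptions.ZOAUException as err:
            raise RuntimeError(
                "Unable to backup {0} to {1}: rc={2} stderr={3}".format(
                    src, dest, err.response.rc, err.response.stderr_response
                )
            )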
diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py
index ac9e74758..68e2e8385 100644
--- a/plugins/module_utils/copy.py
+++ b/plugins/module_utils/copy.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2019-2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -283,7 +283,15 @@ def copy_asa_pds2uss(src, dest):
str -- The stderr after the copy command executed successfully
"""
from os import path
- from zoautil_py import datasets
+ import traceback
+ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
+ ZOAUImportError,
+ )
+
+ try:
+ from zoautil_py import datasets
+ except Exception:
+ datasets = ZOAUImportError(traceback.format_exc())
src = _validate_data_set_name(src)
dest = _validate_path(dest)
diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index 379ccf909..50270a8b2 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -15,6 +15,7 @@
import re
import tempfile
+import traceback
from os import path, walk
from string import ascii_uppercase, digits
from random import sample
@@ -24,8 +25,8 @@
AnsibleModuleHelper,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
MissingImport,
+ ZOAUImportError,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
@@ -39,9 +40,10 @@
vtoc = MissingImport("vtoc")
try:
- from zoautil_py import datasets
+ from zoautil_py import datasets, exceptions
except ImportError:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
+ exceptions = ZOAUImportError(traceback.format_exc())
class DataSet(object):
@@ -175,13 +177,14 @@ def ensure_present(
changed = False
if DataSet.data_set_cataloged(name):
present = True
+
if not present:
try:
DataSet.create(**arguments)
except DatasetCreateError as e:
raise_error = True
# data set exists on volume
- if "Error Code: 0x4704" in e.msg:
+ if "DatasetVerificationError" in e.msg or "Error Code: 0x4704" in e.msg:
present, changed = DataSet.attempt_catalog_if_necessary(
name, volumes
)
@@ -316,7 +319,11 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo
# Now adding special parameters for sequential and partitioned
# data sets.
if model_type not in DataSet.MVS_VSAM:
- block_size = datasets.listing(model)[0].block_size
+ try:
+ data_set = datasets.list_datasets(model)[0]
+ except IndexError:
+ raise AttributeError("Could not retrieve model data set block size.")
+ block_size = data_set.block_size
alloc_cmd = """{0} -
BLKSIZE({1})""".format(alloc_cmd, block_size)
@@ -349,6 +356,7 @@ def data_set_cataloged(name, volumes=None):
"""
name = name.upper()
+
module = AnsibleModuleHelper(argument_spec={})
stdin = " LISTCAT ENTRIES('{0}')".format(name)
rc, stdout, stderr = module.run_command(
@@ -505,7 +513,7 @@ def data_set_volume(name):
DatasetVolumeError: When the function is unable to parse the value
of VOLSER.
"""
- data_set_information = datasets.listing(name)
+ data_set_information = datasets.list_datasets(name)
if len(data_set_information) > 0:
return data_set_information[0].volume
@@ -540,12 +548,12 @@ def data_set_type(name, volume=None):
if not DataSet.data_set_exists(name, volume):
return None
- data_sets_found = datasets.listing(name)
+ data_sets_found = datasets.list_datasets(name)
- # Using the DSORG property when it's a sequential or partitioned
- # dataset. VSAMs are not found by datasets.listing.
+ # Using the organization property when it's a sequential or partitioned
+ # dataset. VSAMs are not found by datasets.list_datasets.
if len(data_sets_found) > 0:
- return data_sets_found[0].dsorg
+ return data_sets_found[0].organization
# Next, trying to get the DATA information of a VSAM through
# LISTCAT.
@@ -911,13 +919,13 @@ def _build_zoau_args(**kwargs):
secondary += space_type
type = kwargs.get("type")
- if type and type == "ZFS":
+ if type and type.upper() == "ZFS":
type = "LDS"
volumes = ",".join(volumes) if volumes else None
kwargs["space_primary"] = primary
kwargs["space_secondary"] = secondary
- kwargs["type"] = type
+ kwargs["dataset_type"] = type
kwargs["volumes"] = volumes
kwargs.pop("space_type", None)
renamed_args = {}
@@ -951,7 +959,7 @@ def create(
force=None,
):
"""A wrapper around zoautil_py
- Dataset.create() to raise exceptions on failure.
+ datasets.create() to raise exceptions on failure.
Reasonable default arguments will be set by ZOAU when necessary.
Args:
@@ -1012,17 +1020,31 @@ def create(
"""
original_args = locals()
formatted_args = DataSet._build_zoau_args(**original_args)
- response = datasets._create(**formatted_args)
- if response.rc > 0:
+ try:
+ datasets.create(**formatted_args)
+ except exceptions._ZOAUExtendableException as create_exception:
+ raise DatasetCreateError(
+ name,
+ create_exception.response.rc,
+ create_exception.response.stdout_response + "\n" + create_exception.response.stderr_response
+ )
+ except exceptions.DatasetVerificationError:
+ # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0
+ if volumes and len(volumes) > 1:
+ if DataSet.data_set_cataloged(name, volumes):
+ return 0
raise DatasetCreateError(
- name, response.rc, response.stdout_response + response.stderr_response
+ name,
+ msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.",
)
- return response.rc
+ # With ZOAU 1.3 we switched from getting a ZOAUResponse object to a Dataset object; previously we returned
+ # response.rc, now we just return 0 if nothing failed.
+ return 0
@staticmethod
def delete(name):
"""A wrapper around zoautil_py
- Dataset.delete() to raise exceptions on failure.
+ datasets.delete() to raise exceptions on failure.
Arguments:
name (str) -- The name of the data set to delete.
@@ -1061,7 +1083,7 @@ def create_member(name):
@staticmethod
def delete_member(name, force=False):
"""A wrapper around zoautil_py
- Dataset.delete_members() to raise exceptions on failure.
+ datasets.delete_members() to raise exceptions on failure.
Arguments:
name (str) -- The name of the data set, including member name, to delete.
@@ -1311,7 +1333,7 @@ def temp_name(hlq=""):
str: The temporary data set name.
"""
if not hlq:
- hlq = datasets.hlq()
+ hlq = datasets.get_hlq()
temp_name = datasets.tmp_name(hlq)
return temp_name
@@ -1772,12 +1794,19 @@ def __init__(self, data_set, rc):
class DatasetCreateError(Exception):
- def __init__(self, data_set, rc, msg=""):
- self.msg = (
- 'An error occurred during creation of data set "{0}". RC={1}, {2}'.format(
- data_set, rc, msg
+ def __init__(self, data_set, rc=None, msg=""):
+ if rc:
+ self.msg = (
+ 'An error occurred during creation of data set "{0}". RC={1}, {2}'.format(
+ data_set, rc, msg
+ )
+ )
+ else:
+ self.msg = (
+ 'An error occurred during creation of data set "{0}". {1}'.format(
+ data_set, msg
+ )
)
- )
super().__init__(self.msg)
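The ``create()`` changes above include a workaround for ZOAU 1.3.0, where verification of a data set spanning multiple volumes fails even though allocation succeeded. A condensed sketch of that decision flow, using the names defined in this file (``datasets``, ``exceptions`` and ``DataSet.data_set_cataloged``); the wrapper name is illustrative:

.. code-block:: python

    from zoautil_py import datasets, exceptions

    def create_checked(name, volumes=None, **kwargs):
        # Condensed illustration of DataSet.create() above (ZOAU >= 1.3).
        try:
            datasets.create(name=name, **kwargs)
        except exceptions.DatasetVerificationError:
            # Multi-volume verification is broken in ZOAU 1.3.0; trust the
            # catalog before declaring failure.
            if volumes and len(volumes) > 1 and DataSet.data_set_cataloged(name, volumes):
                return 0
            raise
        return 0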
diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py
index d35f9e44e..b052f2574 100644
--- a/plugins/module_utils/dd_statement.py
+++ b/plugins/module_utils/dd_statement.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2020, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -10,11 +10,11 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
-
+import traceback
__metaclass__ = type
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet
@@ -22,7 +22,7 @@
try:
from zoautil_py import datasets
except ImportError:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
space_units = {"b": "", "kb": "k", "mb": "m", "gb": "g"}
@@ -651,8 +651,8 @@ def __init__(self, tmphlq=None):
if tmphlq:
hlq = tmphlq
else:
- hlq = datasets.hlq()
- name = datasets.tmp_name(hlq)
+ hlq = datasets.get_hlq()
+ name = datasets.tmp_name(high_level_qualifier=hlq)
super().__init__(name)
def __del__(self):
diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py
index 26bb983b3..83e9746c0 100644
--- a/plugins/module_utils/encode.py
+++ b/plugins/module_utils/encode.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2020 - 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -24,9 +24,10 @@
import os
import re
import locale
+import traceback
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
BetterArgParser,
@@ -39,7 +40,7 @@
try:
from zoautil_py import datasets
except Exception:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
if PY3:
@@ -55,10 +56,12 @@ class Defaults:
@staticmethod
def get_default_system_charset():
- """Get the default encoding of the current machine
+ """Get the default encoding of the current machine.
- Returns:
- str -- The encoding of the current machine
+ Returns
+ -------
+ str
+ The encoding of the current machine.
"""
system_charset = locale.getdefaultlocale()[1]
if system_charset is None:
@@ -79,15 +82,24 @@ def get_default_system_charset():
class EncodeUtils(object):
def __init__(self):
"""Call the coded character set conversion utility iconv
- to convert a USS file from one coded character set to another
-
- Arguments:
- module {AnsibleModule} -- The AnsibleModule object from currently running module
+ to convert a USS file from one coded character set to another.
"""
self.module = AnsibleModuleHelper(argument_spec={})
self.tmphlq = None
def _validate_data_set_name(self, ds):
+ """Validate data set name using BetterArgParser.
+
+ Parameters
+ ----------
+ ds : str
+ The source data set name.
+
+ Returns
+ -------
+ str
+ Parsed data set name.
+ """
arg_defs = dict(
ds=dict(arg_type="data_set"),
)
@@ -96,6 +108,18 @@ def _validate_data_set_name(self, ds):
return parsed_args.get("ds")
def _validate_path(self, path):
+ """Validate path using BetterArgParser.
+
+ Parameters
+ ----------
+ path : str
+ The path.
+
+ Returns
+ -------
+ str
+ Parsed path.
+ """
arg_defs = dict(
path=dict(arg_type="path"),
)
@@ -104,6 +128,18 @@ def _validate_path(self, path):
return parsed_args.get("path")
def _validate_data_set_or_path(self, path):
+ """Validate data set or path using BetterArgParser.
+
+ Parameters
+ ----------
+ path : str
+ The path.
+
+ Returns
+ -------
+ str
+ Parsed path.
+ """
arg_defs = dict(
path=dict(arg_type="data_set_or_path"),
)
@@ -112,6 +148,18 @@ def _validate_data_set_or_path(self, path):
return parsed_args.get("path")
def _validate_encoding(self, encoding):
+ """Validate encoding using BetterArgParser.
+
+ Parameters
+ ----------
+ encoding : str
+ The encoding.
+
+ Returns
+ -------
+ str
+ Parsed encoding.
+ """
arg_defs = dict(
encoding=dict(arg_type="encoding"),
)
@@ -121,16 +169,24 @@ def _validate_encoding(self, encoding):
def listdsi_data_set(self, ds):
"""Invoke IDCAMS LISTCAT command to get the record length and space used
- to estimate the space used by the VSAM data set
-
- Arguments:
- ds: {str} -- The VSAM data set to be checked.
-
- Raises:
- EncodeError: When any exception is raised during the conversion.
- Returns:
- int -- The maximum record length of the VSAM data set.
- int -- The space used by the VSAM data set(KB).
+ to estimate the space used by the VSAM data set.
+
+ Parameters
+ ----------
+ ds : str
+ The VSAM data set to be checked.
+
+ Returns
+ -------
+ int
+ The maximum record length of the VSAM data set.
+ int
+ The space used by the VSAM data set (KB).
+
+ Raises
+ ------
+ EncodeError
+ When any exception is raised during the conversion.
"""
ds = self._validate_data_set_name(ds)
reclen = 80
@@ -178,42 +234,53 @@ def listdsi_data_set(self, ds):
return reclen, space_u
def temp_data_set(self, reclen, space_u):
- """Creates a temporary data set with the given record length and size
-
- Arguments:
- size {str} -- The size of the data set
- lrecl {int} -- The record length of the data set
-
- Returns:
- str -- Name of the allocated data set
-
- Raises:
- OSError: When any exception is raised during the data set allocation
+ """Creates a temporary data set with the given record length and size.
+
+ Parameters
+ ----------
+ reclen : int
+ The record length of the data set.
+ space_u : int
+ The size of the data set.
+
+ Returns
+ -------
+ str
+ Name of the allocated data set.
+
+ Raises
+ ------
+ ZOAUException
+ When any exception is raised during the data set allocation.
+ DatasetVerificationError
+ When the data set creation could not be verified.
"""
size = str(space_u * 2) + "K"
if self.tmphlq:
hlq = self.tmphlq
else:
- hlq = datasets.hlq()
- temp_ps = datasets.tmp_name(hlq)
- response = datasets._create(
+ hlq = datasets.get_hlq()
+ temp_ps = datasets.tmp_name(high_level_qualifier=hlq)
+ temporary_data_set = datasets.create(
name=temp_ps,
- type="SEQ",
+ dataset_type="SEQ",
primary_space=size,
record_format="VB",
record_length=reclen,
)
- if response.rc:
- raise OSError("Failed when allocating temporary sequential data set!")
- return temp_ps
+ return temporary_data_set.name
def get_codeset(self):
- """Get the list of supported encodings from the USS command 'iconv -l'
+ """Get the list of supported encodings from the USS command 'iconv -l'.
+
+ Returns
+ -------
+ list[str]
+ The code set list supported in current USS platform.
- Raises:
- EncodeError: When any exception is raised during the conversion
- Returns:
- list -- The code set list supported in current USS platform
+ Raises
+ ------
+ EncodeError
+ When any exception is raised during the conversion.
"""
code_set = None
iconv_list_cmd = ["iconv", "-l"]
@@ -226,17 +293,26 @@ def get_codeset(self):
return code_set
def string_convert_encoding(self, src, from_encoding, to_encoding):
- """Convert the encoding of the data when the src is a normal string
-
- Arguments:
- from_code_set: {str} -- The source code set of the string
- to_code_set: {str} -- The destination code set for the string
- src: {str} -- The input string content
-
- Raises:
- EncodeError: When any exception is raised during the conversion
- Returns:
- str -- The string content after the encoding
+ """Convert the encoding of the data when the src is a normal string.
+
+ Parameters
+ ----------
+ src : str
+ The input string content.
+ from_encoding : str
+ The source code set of the string.
+ to_encoding : str
+ The destination code set for the string.
+
+ Returns
+ -------
+ str
+ The string content after the encoding.
+
+ Raises
+ ------
+ EncodeError
+ When any exception is raised during the conversion.
"""
from_encoding = self._validate_encoding(from_encoding)
to_encoding = self._validate_encoding(to_encoding)
@@ -249,19 +325,30 @@ def string_convert_encoding(self, src, from_encoding, to_encoding):
return out
def uss_convert_encoding(self, src, dest, from_code, to_code):
- """Convert the encoding of the data in a USS file
-
- Arguments:
- from_code: {str} -- The source code set of the input file
- to_code: {str} -- The destination code set for the output file
- src: {str} -- The input file name, it should be a uss file
- dest: {str} -- The output file name, it should be a uss file
-
- Raises:
- EncodeError: When any exception is raised during the conversion.
- MoveFileError: When any exception is raised during moving files.
- Returns:
- boolean -- Indicate whether the conversion is successful or not.
+ """Convert the encoding of the data in a USS file.
+
+ Parameters
+ ----------
+ src : str
+ The input file name, it should be a uss file.
+ dest : str
+ The output file name, it should be a uss file.
+ from_code : str
+ The source code set of the input file.
+ to_code : str
+ The destination code set for the output file.
+
+ Returns
+ -------
+ bool
+ Indicate whether the conversion is successful or not.
+
+ Raises
+ ------
+ EncodeError
+ When any exception is raised during the conversion.
+ MoveFileError
+ When any exception is raised during moving files.
"""
src = self._validate_path(src)
dest = self._validate_path(dest)
@@ -306,18 +393,28 @@ def uss_convert_encoding(self, src, dest, from_code, to_code):
def uss_convert_encoding_prev(self, src, dest, from_code, to_code):
"""For multiple files conversion, such as a USS path or MVS PDS data set,
- use this method to split then do the conversion
-
- Arguments:
- from_code: {str} -- The source code set of the input path
- to_code: {str} -- The destination code set for the output path
- src: {str} -- The input uss path or a file
- dest: {str} -- The output uss path or a file
-
- Raises:
- EncodeError: When direcotry is empty or copy multiple files to a single file
- Returns:
- boolean -- Indicate whether the conversion is successful or not
+ use this method to split then do the conversion.
+
+ Parameters
+ ----------
+ src : str
+ The input uss path or a file.
+ dest : str
+ The output uss path or a file.
+ from_code : str
+ The source code set of the input path.
+ to_code : str
+ The destination code set for the output path.
+
+ Returns
+ -------
+ bool
+ Indicate whether the conversion is successful or not.
+
+ Raises
+ ------
+ EncodeError
+ When directory is empty or copy multiple files to a single file.
"""
src = self._validate_path(src)
dest = self._validate_path(dest)
@@ -375,18 +472,28 @@ def mvs_convert_encoding(
2) MVS to USS
3) MVS to MVS
- Arguments:
- src: {str} -- The input MVS data set or USS path to be converted
- dest: {str} -- The output MVS data set or USS path to be converted
- from_code: {str} -- The source code set of the input MVS data set
- to_code: {str} -- The destination code set of the output MVS data set
-
- Keyword Arguments:
- src_type {[type]} -- The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS) (default: {None})
- dest_type {[type]} -- The output MVS data set type (default: {None})
-
- Returns:
- boolean -- Indicate whether the conversion is successful or not
+ Parameters
+ ----------
+ src : str
+ The input MVS data set or USS path to be converted.
+ dest : str
+ The output MVS data set or USS path to be converted.
+ from_code : str
+ The source code set of the input MVS data set.
+ to_code : str
+ The destination code set of the output MVS data set.
+
+ Keyword Parameters
+ ------------------
+ src_type : str
+ The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS).
+ dest_type : str
+ The output MVS data set type.
+
+ Returns
+ -------
+ bool
+ Indicate whether the conversion is successful or not.
"""
src = self._validate_data_set_or_path(src)
dest = self._validate_data_set_or_path(dest)
@@ -406,7 +513,7 @@ def mvs_convert_encoding(
rc, out, err = copy.copy_pds2uss(src, temp_src)
if src_type == "VSAM":
reclen, space_u = self.listdsi_data_set(src.upper())
- # RDW takes the first 4 bytes or records in the VB format, hence we need to add an extra buffer to the vsam max recl.
+ # RDW takes the first 4 bytes in the VB format, hence we need to add an extra buffer to the vsam max recl.
reclen += 4
temp_ps = self.temp_data_set(reclen, space_u)
rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps)
@@ -458,11 +565,18 @@ def uss_tag_encoding(self, file_path, tag):
"""Tag the file/directory specified with the given code set.
If `file_path` is a directory, all of the files and subdirectories will
be tagged recursively.
- Arguments:
- file_path {str} -- Absolute file path to tag.
- tag {str} -- Code set to tag the file/directory.
- Raises:
- TaggingError: When the chtag command fails.
+
+ Parameters
+ ----------
+ file_path : str
+ Absolute file path to tag.
+ tag : str
+ Code set to tag the file/directory.
+
+ Raises
+ ------
+ TaggingError
+ When the chtag command fails.
"""
is_dir = os.path.isdir(file_path)
@@ -473,11 +587,18 @@ def uss_tag_encoding(self, file_path, tag):
def uss_file_tag(self, file_path):
"""Returns the current tag set for a file.
- Arguments:
- file_path {str} -- USS path to the file.
- Returns:
- str -- Current tag set for the file, as returned by 'ls -T'
- None -- If the file does not exist or the command fails.
+
+ Parameters
+ ----------
+ file_path : str
+ USS path to the file.
+
+ Returns
+ -------
+ str
+ Current tag set for the file, as returned by 'ls -T'.
+ None
+ If the file does not exist or the command fails.
"""
if not os.path.exists(file_path):
return None
@@ -500,12 +621,50 @@ def uss_file_tag(self, file_path):
class EncodeError(Exception):
def __init__(self, message):
+ """Error during encoding.
+
+ Parameters
+ ----------
+ message : str
+ Human readable string describing the exception.
+
+ Attributes
+ ----------
+ msg : str
+ Human readable string describing the exception.
+ """
self.msg = 'An error occurred during encoding: "{0}"'.format(message)
super(EncodeError, self).__init__(self.msg)
class TaggingError(Exception):
def __init__(self, file_path, tag, rc, stdout, stderr):
+ """Error during tagging.
+
+ Parameters
+ ----------
+ file_path : str
+ File to tag.
+ tag : str
+ Tag to put in the file.
+ rc : int
+ Return code.
+ stdout : str
+ Standard output.
+ stderr : str
+ Standard error.
+
+ Attributes
+ ----------
+ msg : str
+ Human readable string describing the exception.
+ rc : int
+ Return code.
+ stdout : str
+ Standard output.
+ stderr : str
+ Standard error.
+ """
self.msg = 'An error occurred during tagging of {0} to {1}'.format(
file_path,
tag
@@ -518,5 +677,21 @@ def __init__(self, file_path, tag, rc, stdout, stderr):
class MoveFileError(Exception):
def __init__(self, src, dest, e):
+ """Error while moving a file.
+
+ Parameters
+ ----------
+ src : str
+ From where the file moves.
+ dest : str
+ To where the file moves.
+ e : str
+ Exception message.
+
+ Attributes
+ ----------
+ msg : str
+ Human readable string describing the exception.
+ """
self.msg = "Failed when moving {0} to {1}: {2}".format(src, dest, e)
super().__init__(self.msg)
diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py
index 67ddd3d9d..7081e2163 100644
--- a/plugins/module_utils/ickdsf.py
+++ b/plugins/module_utils/ickdsf.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2023
+# Copyright (c) IBM Corporation 2023, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -26,6 +26,22 @@
def get_init_command(module, result, args):
+ """Get init command.
+
+ Parameters
+ ----------
+ module : object
+ Object from the collection.
+ result : dict
+ Results dictionary.
+ args : dict
+ Arguments to be formatted.
+
+ Returns
+ -------
+ str
+ Formatted JCL strings for zos_mvs_raw.
+ """
# Get parameters from playbooks
address = args.get('address')
@@ -104,6 +120,22 @@ def get_init_command(module, result, args):
def init(module, result, parsed_args):
+ """Init
+
+ Parameters
+ ----------
+ module : object
+ The module to give results of.
+ result : dict
+ The results of the process.
+ parsed_args : dict
+ Parsed arguments to be converted to command.
+
+ Returns
+ -------
+ dict
+ The dictionary with the results.
+ """
# Convert args parsed from module to ickdsf INIT command
cmd = get_init_command(module, result, parsed_args)
diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py
index a7b41a619..7b5031216 100644
--- a/plugins/module_utils/import_handler.py
+++ b/plugins/module_utils/import_handler.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2020
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -15,8 +15,24 @@
class MissingZOAUImport(object):
+ """Error when importing ZOAU.
+ """
def __getattr__(self, name):
def method(*args, **kwargs):
+ """Raises ImportError as a result of a failed ZOAU import.
+
+ Parameters
+ ----------
+ *args : tuple
+ Positional arguments.
+ **kwargs : dict
+ Keyword arguments.
+
+ Raises
+ ------
+ ImportError
+ Unable to import a module or library.
+ """
raise ImportError(
(
"ZOAU is not properly configured for Ansible. Unable to import zoautil_py. "
@@ -28,26 +44,34 @@ def method(*args, **kwargs):
class ZOAUImportError(object):
- """This class serves as a wrapper for any kind of error when importing
- ZOAU. Since ZOAU is used by both modules and module_utils, we need a way
- to alert the user when they're trying to use a function that couldn't be
- imported properly. If we only had to deal with this in modules, we could
- just validate that imports worked at the start of their main functions,
- but on utils, we don't have an entry point where we can validate this.
- Just raising an exception when trying the import would be better, but that
- introduces a failure on Ansible sanity tests, so we can't do it.
-
- Instead, we'll replace what would've been a ZOAU library with this class,
- and the moment ANY method gets called, we finally raise an exception.
- """
-
def __init__(self, exception_traceback):
- """When creating a new instance of this class, we save the traceback
+ """This class serves as a wrapper for any kind of error when importing
+ ZOAU. Since ZOAU is used by both modules and module_utils, we need a way
+ to alert the user when they're trying to use a function that couldn't be
+ imported properly. If we only had to deal with this in modules, we could
+ just validate that imports worked at the start of their main functions,
+ but on utils, we don't have an entry point where we can validate this.
+ Just raising an exception when trying the import would be better, but that
+ introduces a failure on Ansible sanity tests, so we can't do it.
+
+ Instead, we'll replace what would've been a ZOAU library with this class,
+ and the moment ANY method gets called, we finally raise an exception.
+ When creating a new instance of this class, we save the traceback
from the original exception so that users have more context when their
task/code fails. The expected traceback is a string representation of
it, not an actual traceback object. By importing `traceback` from the
standard library and calling `traceback.format_exc()` we can
get this string.
+
+ Parameters
+ ----------
+ exception_traceback : str
+ The formatted traceback of the exception.
+
+ Attributes
+ ----------
+ traceback : str
+ The formatted traceback of the exception.
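+
+ Examples
+ --------
+ A minimal sketch of how modules in this collection use this wrapper
+ when a ZOAU import fails:
+
+ >>> try:
+ ...     from zoautil_py import datasets
+ ... except Exception:
+ ...     datasets = ZOAUImportError(traceback.format_exc())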
"""
self.traceback = exception_traceback
@@ -58,6 +82,20 @@ def __getattr__(self, name):
an error while importing ZOAU.
"""
def method(*args, **kwargs):
+ """Raises ImportError as a result of a failed ZOAU import.
+
+ Parameters
+ ----------
+ *args : tuple
+ Positional arguments.
+ **kwargs : dict
+ Keyword arguments.
+
+ Raises
+ ------
+ ImportError
+ Unable to import a module or library.
+ """
raise ImportError(
(
"ZOAU is not properly configured for Ansible. Unable to import zoautil_py. "
@@ -71,10 +109,36 @@ def method(*args, **kwargs):
class MissingImport(object):
def __init__(self, import_name=""):
+ """Error when it is unable to import a module due to it being missing.
+
+ Parameters
+ ----------
+ import_name : str
+ The name of the module to import.
+
+ Attributes
+ ----------
+ import_name : str
+ The name of the module to import.
+ """
self.import_name = import_name
def __getattr__(self, name):
def method(*args, **kwargs):
+ """Raises ImportError as a result of trying to import a missing module.
+
+ Parameters
+ ----------
+ *args : tuple
+ Positional arguments.
+ **kwargs : dict
+ Keyword arguments.
+
+ Raises
+ ------
+ ImportError
+ Unable to import a module or library.
+ """
raise ImportError("Import {0} was not available.".format(self.import_name))
return method
diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py
index 2d4da8962..8d9ac3a5c 100644
--- a/plugins/module_utils/job.py
+++ b/plugins/module_utils/job.py
@@ -18,11 +18,14 @@
import traceback
from time import sleep
from timeit import default_timer as timer
+# Only importing this module so we can catch a JSONDecodeError that sometimes happens
+# when a job's output has non-printable chars that conflict with JSON's control
+# chars.
+from json import JSONDecodeError
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
BetterArgParser,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- # MissingZOAUImport,
ZOAUImportError
)
@@ -41,44 +44,47 @@
# from zoautil_py.jobs import read_output, list_dds, listing
from zoautil_py import jobs
except Exception:
- # read_output = MissingZOAUImport()
- # list_dds = MissingZOAUImport()
- # listing = MissingZOAUImport()
jobs = ZOAUImportError(traceback.format_exc())
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
- zoau_version_checker
-)
-
-JOB_ERROR_STATUS = frozenset(["ABEND", # ZOAU job ended abnormally
- "SEC ERROR", # Security error (legacy Ansible code)
- "SEC", # ZOAU security error
- "JCL ERROR", # Job had a JCL error (legacy Ansible code)
- "JCLERR", # ZOAU job had a JCL error
- "CANCELED", # ZOAU job was cancelled
- "CAB", # ZOAU converter abend
- "CNV", # ZOAU converter error
- "SYS", # ZOAU system failure
- "FLU", # ZOAU job was flushed
- "?" # ZOAU error or unknown
- ])
+JOB_ERROR_STATUSES = frozenset(["ABEND", # ZOAU job ended abnormally
+ "SEC ERROR", # Security error (legacy Ansible code)
+ "SEC", # ZOAU security error
+ "JCL ERROR", # Job had a JCL error (legacy Ansible code)
+ "JCLERR", # ZOAU job had a JCL error
+ "CANCELED", # ZOAU job was cancelled
+ "CAB", # ZOAU converter abend
+ "CNV", # ZOAU converter error
+ "SYS", # ZOAU system failure
+ "FLU" # ZOAU job was flushed
+ ])
def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()):
"""Get the output from a z/OS job based on various search criteria.
- Keyword Arguments:
- job_id (str) -- The job ID to search for (default: {None})
- owner (str) -- The owner of the job (default: {None})
- job_name (str) -- The job name search for (default: {None})
- dd_name (str) -- The data definition to retrieve (default: {None})
- dd_scan (bool) - Whether or not to pull information from the dd's for this job {default: {True}}
- duration (int) -- The time the submitted job ran for
- timeout (int) - how long to wait in seconds for a job to complete
- start_time (int) - time the JCL started its submission
-
- Returns:
- list[dict] -- The output information for a list of jobs matching specified criteria.
+ Keyword Parameters
+ ------------------
+ job_id : str
+ The job ID to search for (default: {None}).
+ owner : str
+ The owner of the job (default: {None}).
+ job_name : str
+ The job name to search for (default: {None}).
+ dd_name : str
+ The data definition to retrieve (default: {None}).
+ dd_scan : bool
+ Whether or not to pull information from the DDs for this job (default: {True}).
+ duration : int
+ The time the submitted job ran for.
+ timeout : int
+ How long to wait in seconds for a job to complete.
+ start_time : int
+ Time the JCL started its submission.
+
+ Returns
+ -------
+ list[dict]
+ The output information for a list of jobs matching specified criteria.
If no job status is found it will return a ret_code dictionary with
parameter msg_txt = "The job could not be found."
"""
@@ -90,34 +96,69 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru
)
parser = BetterArgParser(arg_defs)
- parsed_args = parser.parse_args(
- {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name}
- )
+ parsed_args = parser.parse_args({
+ "job_id": job_id,
+ "owner": owner,
+ "job_name": job_name,
+ "dd_name": dd_name
+ })
job_id = parsed_args.get("job_id") or "*"
job_name = parsed_args.get("job_name") or "*"
owner = parsed_args.get("owner") or "*"
dd_name = parsed_args.get("dd_name") or ""
- job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name,
- dd_name=dd_name, duration=duration, dd_scan=dd_scan, timeout=timeout, start_time=start_time)
-
- # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout):
- # current_time = timer()
- # duration = round(current_time - start_time)
- # sleep(1)
+ job_detail = _get_job_status(
+ job_id=job_id,
+ owner=owner,
+ job_name=job_name,
+ dd_name=dd_name,
+ duration=duration,
+ dd_scan=dd_scan,
+ timeout=timeout,
+ start_time=start_time
+ )
if len(job_detail) == 0:
# some systems have issues with "*" while some require it to see results
job_id = "" if job_id == "*" else job_id
owner = "" if owner == "*" else owner
job_name = "" if job_name == "*" else job_name
- job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name,
- dd_name=dd_name, dd_scan=dd_scan, duration=duration, timeout=timeout, start_time=start_time)
+
+ job_detail = _get_job_status(
+ job_id=job_id,
+ owner=owner,
+ job_name=job_name,
+ dd_name=dd_name,
+ dd_scan=dd_scan,
+ duration=duration,
+ timeout=timeout,
+ start_time=start_time
+ )
return job_detail
def _job_not_found(job_id, owner, job_name, dd_name):
- # Note that the text in the msg_txt is used in test cases thus sensitive to change
+ """Returns the information of a not founded job.
+
+ Parameters
+ ----------
+ job_id : str
+ The job ID that was searched for.
+ owner : str
+ The owner of the job.
+ job_name : str
+ The job name that was searched for.
+ dd_name : str
+ The data definition that was searched for.
+
+ Returns
+ -------
+ list[dict]
+ The empty job information in a list.
+ If no job status is found it will return a ret_code dictionary with
+ parameter msg_txt = "The job could not be found."
+ """
+ # Note that the text in the msg_txt is used in test cases and thus sensitive to change
jobs = []
if job_id != '*' and job_name != '*':
job_not_found_msg = "{0} with the job_id {1}".format(job_name.upper(), job_id.upper())
@@ -160,18 +201,25 @@ def _job_not_found(job_id, owner, job_name, dd_name):
def job_status(job_id=None, owner=None, job_name=None, dd_name=None):
"""Get the status information of a z/OS job based on various search criteria.
- Keyword Arguments:
- job_id {str} -- The job ID to search for (default: {None})
- owner {str} -- The owner of the job (default: {None})
- job_name {str} -- The job name search for (default: {None})
- dd_name {str} -- If populated, return ONLY this DD in the job list (default: {None})
- note: no routines call job_status with dd_name, so we are speeding this routine with
- 'dd_scan=False'
-
- Returns:
- list[dict] -- The status information for a list of jobs matching search criteria.
+ Keyword Parameters
+ ------------------
+ job_id : str
+ The job ID to search for (default: {None}).
+ owner : str
+ The owner of the job (default: {None}).
+ job_name : str
+ The job name search for (default: {None}).
+ dd_name : str
+ If populated, return ONLY this DD in the job list (default: {None}).
+ Note: no routines call job_status with dd_name, so this routine is sped
+ up with 'dd_scan=False'.
+
+ Returns
+ -------
+ list[dict]
+ The status information for a list of jobs matching search criteria.
If no job status is found it will return a ret_code diction with
- parameter 'msg_txt" = "The job could not be found."
+ parameter 'msg_txt" = "The job could not be found.".
"""
arg_defs = dict(
@@ -189,25 +237,40 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None):
job_name = parsed_args.get("job_name") or "*"
owner = parsed_args.get("owner") or "*"
- job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False)
+ job_status_result = _get_job_status(
+ job_id=job_id,
+ owner=owner,
+ job_name=job_name,
+ dd_scan=False
+ )
if len(job_status_result) == 0:
job_id = "" if job_id == "*" else job_id
job_name = "" if job_name == "*" else job_name
owner = "" if owner == "*" else owner
- job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False)
+
+ job_status_result = _get_job_status(
+ job_id=job_id,
+ owner=owner,
+ job_name=job_name,
+ dd_scan=False
+ )
return job_status_result
def _parse_steps(job_str):
- """Parse the dd section of output to retrieve step-wise CC's
+ """Parse the dd section of output to retrieve step-wise CC's.
- Args:
- job_str (str): The content for a given dd.
+ Parameters
+ ----------
+ job_str : str
+ The content for a given dd.
- Returns:
- list[dict]: A list of step names listed as "step executed" the related CC.
+ Returns
+ -------
+ list[dict]
+ A list of step names listed as "step executed" and the related CC.
"""
stp = []
if "STEP WAS EXECUTED" in job_str:
@@ -224,34 +287,50 @@ def _parse_steps(job_str):
def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()):
+ """Get job status.
+
+ Parameters
+ ----------
+ job_id : str
+ The job ID to search for (default: {"*"}).
+ owner : str
+ The owner of the job (default: {"*"}).
+ job_name : str
+ The job name to search for (default: {"*"}).
+ dd_name : str
+ The data definition to retrieve (default: {None}).
+ dd_scan : bool
+ Whether or not to pull information from the DDs for this job (default: {True}).
+ duration : int
+ The time the submitted job ran for.
+ timeout : int
+ How long to wait in seconds for a job to complete.
+ start_time : int
+ Time the JCL started its submission.
+
+ Returns
+ -------
+ list[dict]
+ The output information for a list of jobs matching specified criteria.
+ If no job status is found it will return a ret_code dictionary with
+ parameter msg_txt = "The job could not be found."
+ """
if job_id == "*":
job_id_temp = None
else:
# Preserve the original job_id for the failure path
job_id_temp = job_id
- # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4]
- # e.g.: OMVSADM HELLO JOB00126 JCLERR ?
- # jobs.listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not
- # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers
- # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8]
- # creationdatetime=job[9] queueposition=job[10]
- # starting in zoau 1.2.4, program_name[11] was added.
-
- # Testing has shown that the program_name impact is minor, so we're removing that option
- # This will also help maintain compatibility with 1.2.3
-
final_entries = []
- kwargs = {
- "job_id": job_id_temp,
- }
- entries = jobs.listing(**kwargs)
+
+ # In 1.3.0, include_extended has to be set to true so we get the program name for a job.
+ entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True)
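+ # A just-submitted job may not be visible in the jobs listing yet, so
+ # retry the fetch until it appears or the timeout elapses.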
while ((entries is None or len(entries) == 0) and duration <= timeout):
current_time = timer()
duration = round(current_time - start_time)
sleep(1)
- entries = jobs.listing(**kwargs)
+ entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True)
if entries:
for entry in entries:
@@ -262,41 +341,35 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T
if not fnmatch.fnmatch(entry.name, job_name):
continue
if job_id_temp is not None:
- if not fnmatch.fnmatch(entry.id, job_id):
+ if not fnmatch.fnmatch(entry.job_id, job_id):
continue
job = {}
- job["job_id"] = entry.id
+ job["job_id"] = entry.job_id
job["job_name"] = entry.name
job["subsystem"] = ""
job["system"] = ""
job["owner"] = entry.owner
+ # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'.
job["ret_code"] = {}
job["ret_code"]["msg"] = entry.status
- # job["ret_code"]["msg"] = entry.status + " " + entry.rc
- job["ret_code"]["msg_code"] = entry.rc
+ job["ret_code"]["msg_code"] = entry.return_code
job["ret_code"]["code"] = None
- if len(entry.rc) > 0:
- if entry.rc.isdigit():
- job["ret_code"]["code"] = int(entry.rc)
+ if entry.return_code and len(entry.return_code) > 0:
+ if entry.return_code.isdigit():
+ job["ret_code"]["code"] = int(entry.return_code)
job["ret_code"]["msg_txt"] = entry.status
- # this section only works on zoau 1.2.3/+ vvv
-
- if zoau_version_checker.is_zoau_version_higher_than("1.2.2"):
- job["job_class"] = entry.job_class
- job["svc_class"] = entry.svc_class
- job["priority"] = entry.priority
- job["asid"] = entry.asid
- job["creation_date"] = str(entry.creation_datetime)[0:10]
- job["creation_time"] = str(entry.creation_datetime)[12:]
- job["queue_position"] = entry.queue_position
- if zoau_version_checker.is_zoau_version_higher_than("1.2.3"):
- job["program_name"] = entry.program_name
-
- # this section only works on zoau 1.2.3/+ ^^^
-
+ # Beginning in ZOAU v1.3.0, the Job class changes svc_class to service_class.
+ job["svc_class"] = entry.service_class
+ job["job_class"] = entry.job_class
+ job["priority"] = entry.priority
+ job["asid"] = entry.asid
+ job["creation_date"] = str(entry.creation_datetime)[0:10]
+ job["creation_time"] = str(entry.creation_datetime)[12:]
+ job["queue_position"] = entry.queue_position
+ job["program_name"] = entry.program_name
job["class"] = ""
job["content_type"] = ""
job["ret_code"]["steps"] = []
@@ -311,18 +384,16 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T
list_of_dds = []
try:
- list_of_dds = jobs.list_dds(entry.id)
- except exceptions.DDQueryException as err:
- if 'BGYSC5201E' in str(err):
- is_dd_query_exception = True
- pass
+ list_of_dds = jobs.list_dds(entry.job_id)
+ except exceptions.DDQueryException:
+ is_dd_query_exception = True
# Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s.
# Idea here is to force a TYPRUN{HOLD|JCLHOLD|COPY} job to go the full wait duration since we have
# currently no way to detect them, but if we know the job is one of the JOB_ERROR_STATUS lets
# exit the wait time supplied as we know it is a job failure.
- is_jesjcl = True if search_dictionaries("dataset", "JESJCL", list_of_dds) else False
- is_job_error_status = True if entry.status in JOB_ERROR_STATUS else False
+ is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False
+ is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False
while ((list_of_dds is None or len(list_of_dds) == 0 or is_dd_query_exception) and
(not is_jesjcl and not is_job_error_status and duration <= timeout)):
@@ -330,33 +401,32 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T
duration = round(current_time - start_time)
sleep(1)
try:
- # Note, in then event of an exception, eg job has TYPRUN=HOLD
+ # Note, in the event of an exception, e.g. job has TYPRUN=HOLD,
# list_of_dds will still be populated with valuable content
- list_of_dds = jobs.list_dds(entry.id)
- is_jesjcl = True if search_dictionaries("dataset", "JESJCL", list_of_dds) else False
- is_job_error_status = True if entry.status in JOB_ERROR_STATUS else False
- except exceptions.DDQueryException as err:
- if 'BGYSC5201E' in str(err):
- is_dd_query_exception = True
- continue
+ list_of_dds = jobs.list_dds(entry.job_id)
+ is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False
+ is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False
+ except exceptions.DDQueryException:
+ is_dd_query_exception = True
+ continue
job["duration"] = duration
for single_dd in list_of_dds:
dd = {}
+ if "dd_name" not in single_dd:
+ continue
+
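+ # ZOAU 1.3 renamed the keys returned by list_dds: dataset -> dd_name,
+ # recnum -> records, stepname -> step_name and length -> record_length.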
# If dd_name not None, only that specific dd_name should be returned
if dd_name is not None:
- if dd_name not in single_dd["dataset"]:
+ if dd_name not in single_dd["dd_name"]:
continue
else:
- dd["ddname"] = single_dd["dataset"]
+ dd["ddname"] = single_dd["dd_name"]
- if "dataset" not in single_dd:
- continue
-
- if "recnum" in single_dd:
- dd["record_count"] = single_dd["recnum"]
+ if "records" in single_dd:
+ dd["record_count"] = single_dd["records"]
else:
dd["record_count"] = None
@@ -365,41 +435,42 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T
else:
dd["id"] = "?"
- if "stepname" in single_dd:
- dd["stepname"] = single_dd["stepname"]
+ if "step_name" in single_dd:
+ dd["stepname"] = single_dd["step_name"]
else:
dd["stepname"] = None
if "procstep" in single_dd:
dd["procstep"] = single_dd["procstep"]
else:
- dd["proctep"] = None
+ dd["procstep"] = None
- if "length" in single_dd:
- dd["byte_count"] = single_dd["length"]
+ if "record_length" in single_dd:
+ dd["byte_count"] = single_dd["record_length"]
else:
dd["byte_count"] = 0
tmpcont = None
- if "stepname" in single_dd:
- if "dataset" in single_dd:
- # In case ZOAU fails when reading the job output, we'll
- # add a message to the user telling them of this.
- # ZOAU cannot read partial output from a job, so we
- # have to make do with nothing from this step if it fails.
+ if "step_name" in single_dd:
+ if "dd_name" in single_dd:
+ # In case ZOAU fails when reading the job output, we'll add a
+ # message to the user telling them of this. ZOAU cannot read
+ # partial output from a job, so we have to make do with nothing
+ # from this step if it fails.
try:
tmpcont = jobs.read_output(
- entry.id,
- single_dd["stepname"],
- single_dd["dataset"]
+ entry.job_id,
+ single_dd["step_name"],
+ single_dd["dd_name"]
)
- except UnicodeDecodeError:
+ except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError):
tmpcont = (
"Non-printable UTF-8 characters were present in this output. "
- "Please access it manually."
+ "Please access it from the job log."
)
dd["content"] = tmpcont.split("\n")
+
job["ret_code"]["steps"].extend(_parse_steps(tmpcont))
job["ddnames"].append(dd)
@@ -420,19 +491,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T
job["subsystem"] = (tmptext.split("\n")[
0]).replace(" ", "")
- # Disabling this code, the integer following JCL ERROR is not a reason code, its a
- # multi line marker used in a WTO so errors spanning more than one line will be found
- # by searching for the prefix integer, eg 029
- # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 "
- # then further reduce down to: 'JCL ERROR 029'
- # if job["ret_code"]["msg_code"] == "?":
- # if "JOB NOT RUN -" in tmpcont:
- # tmptext = tmpcont.split(
- # "JOB NOT RUN -")[1].split("\n")[0]
- # job["ret_code"]["msg"] = tmptext.strip()
- # job["ret_code"]["msg_code"] = None
- # job["ret_code"]["code"] = None
-
final_entries.append(job)
if not final_entries:
final_entries = _job_not_found(job_id, owner, job_name, "unavailable")
@@ -440,19 +498,25 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T
def _ddname_pattern(contents, resolve_dependencies):
- """Resolver for ddname_pattern type arguments
+ """Resolver for ddname_pattern type arguments.
- Arguments:
- contents {bool} -- The contents of the argument.
+ Parameters
+ ----------
+ contents : bool
+ The contents of the argument.
- resolved_dependencies {dict} -- Contains all of the dependencies and their contents,
- which have already been handled,
- for use during current arguments handling operations.
+ resolve_dependencies : dict
+ Contains all of the dependencies and their contents, which have already
+ been handled, for use during current arguments handling operations.
- Raises:
- ValueError: When contents is invalid argument type
+ Returns
+ -------
+ str
+ The arguments contents after any necessary operations.
- Returns:
- str -- The arguments contents after any necessary operations.
+ Raises
+ ------
+ ValueError
+ When contents is an invalid argument type.
"""
if not re.fullmatch(
r"^(?:[A-Z]{1}[A-Z0-9]{0,7})|(?:\?{1})$",
diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py
index c08847503..a645d3362 100644
--- a/plugins/module_utils/validation.py
+++ b/plugins/module_utils/validation.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2023
+# Copyright (c) IBM Corporation 2023, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -22,11 +22,25 @@
def validate_safe_path(path):
- """
- This function is implemented to validate against path traversal attack
+ """This function is implemented to validate against path traversal attack
when using os.path.join function.
In this action plugin, path is on the controller.
+
+ Parameters
+ ----------
+ path : str
+ A file's path.
+
+ Returns
+ -------
+ str
+ The validated path.
+
+ Raises
+ ------
+ DirectoryTraversalError
+ When directory traversal is detected, i.e. the user does not have
+ access to the directory.
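+
+ Examples
+ --------
+ A minimal sketch of the intended call pattern; base_dir and user_path
+ are hypothetical names:
+
+ >>> safe = validate_safe_path(os.path.join(base_dir, user_path))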
"""
if not os.path.isabs(path):
real_path = os.path.realpath(path)
@@ -39,6 +53,18 @@ def validate_safe_path(path):
class DirectoryTraversalError(Exception):
+ """User does not have access to a directory.
+
+ Parameters
+ ----------
+ path : str
+ Directory path.
+
+ Attributes
+ ----------
+ msg : str
+ Human readable string describing the exception.
+ """
def __init__(self, path):
self.msg = "Detected directory traversal, user does not have access to {0}".format(path)
super().__init__(self.msg)
diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py
index 7c2badf84..bc865d098 100644
--- a/plugins/module_utils/zos_mvs_raw.py
+++ b/plugins/module_utils/zos_mvs_raw.py
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2020, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -24,7 +24,7 @@ class MVSCmd(object):
"""
@staticmethod
- def execute(pgm, dds, parm="", debug=False, verbose=False):
+ def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None):
"""Execute an unauthorized MVS command.
Args:
@@ -36,9 +36,10 @@ def execute(pgm, dds, parm="", debug=False, verbose=False):
MVSCmdResponse: The response of the command.
"""
module = AnsibleModuleHelper(argument_spec={})
- command = "mvscmd {0} {1} {2} ".format(
+ command = "mvscmd {0} {1} {2} {3}".format(
"-d" if debug else "",
"-v" if verbose else "",
+ "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "",
MVSCmd._build_command(pgm, dds, parm),
)
rc, out, err = module.run_command(command)
@@ -64,7 +65,6 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No
"--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "",
MVSCmd._build_command(pgm, dds, parm),
)
-
rc, out, err = module.run_command(command)
return MVSCmdResponse(rc, out, err)
diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py
index 72ada160e..c9cc8ba6b 100644
--- a/plugins/modules/zos_apf.py
+++ b/plugins/modules/zos_apf.py
@@ -59,7 +59,7 @@
- The identifier for the volume containing the library specified in
the C(library) parameter. The values must be one the following.
- 1. The volume serial number.
- - 2. Six asterisks (******), indicating that the system must use the
+ - 2. Six asterisks C(******), indicating that the system must use the
volume serial number of the current system residence (SYSRES) volume.
- 3. *MCAT*, indicating that the system must use the volume serial number
of the volume containing the master catalog.
@@ -176,7 +176,7 @@
specified on the C(library) parameter. The values must be one of the
following.
- 1. The volume serial number
- - 2. Six asterisks (******), indicating that the system must use the
+ - 2. Six asterisks C(******), indicating that the system must use the
volume serial number of the current system residence (SYSRES)
volume.
- 3. *MCAT*, indicating that the system must use the volume serial
@@ -221,7 +221,7 @@
- name: Add a library (cataloged) to the APF list and persistence
zos_apf:
library: SOME.SEQUENTIAL.DATASET
- force_dynamic: True
+ force_dynamic: true
persistent:
data_set_name: SOME.PARTITIONED.DATASET(MEM)
- name: Remove a library from the APF list and persistence
@@ -239,7 +239,7 @@
batch:
- library: SOME.SEQ.DS1
- library: SOME.SEQ.DS2
- sms: True
+ sms: true
- library: SOME.SEQ.DS3
volume: T12345
- name: Print the APF list matching library pattern or volume serial number
@@ -292,17 +292,19 @@
import re
import json
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
better_arg_parser, data_set, backup as Backup)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
+import traceback
try:
from zoautil_py import zsystem
except Exception:
- Datasets = MissingZOAUImport()
+ zsystem = ZOAUImportError(traceback.format_exc())
# supported data set types
@@ -310,6 +312,30 @@
def backupOper(module, src, backup, tmphlq=None):
+ """Create a backup for a specified USS file or MVS data set.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ The AnsibleModule object from the running task.
+ src : str
+ Source USS file or data set to backup.
+ backup : str
+ Name for the backup.
+ tmphlq : str
+ The name of the temporary high level qualifier to use.
+
+ Returns
+ -------
+ str
+ Backup name.
+
+ Raises
+ ------
+ fail_json
+ Data set type is NOT supported.
+ fail_json
+ Creating the backup has failed.
+ """
# analysis the file type
ds_utils = data_set.DataSetUtils(src)
file_type = ds_utils.ds_type()
@@ -334,6 +360,19 @@ def backupOper(module, src, backup, tmphlq=None):
def main():
+ """Initialize the module.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ Marker length may not exceed 72 characters.
+ fail_json
+ library is required.
+ fail_json
+ An exception occurred.
+ """
module = AnsibleModule(
argument_spec=dict(
library=dict(
@@ -522,6 +561,12 @@ def main():
result['rc'] = operRc
result['stdout'] = operOut
if operation == 'list':
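+ # ZOAU 1.3 returns the APF list as JSON with the data sets nested under
+ # a "data" tag; parse it once here so both the filtered and unfiltered
+ # paths below can use it.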
+ try:
+ data = json.loads(operOut)
+ data_sets = data["data"]["datasets"]
+ except Exception as e:
+ err_msg = "An exception occurred. See stderr for more details."
+ module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr)
if not library:
library = ""
if not volume:
@@ -529,20 +574,26 @@ def main():
if sms:
sms = "*SMS*"
if library or volume or sms:
- try:
- data = json.loads(operOut)
- except json.JSONDecodeError:
- module.exit_json(**result)
- for d in data[2:]:
+ ds_list = ""
+ for d in data_sets:
ds = d.get('ds')
vol = d.get('vol')
try:
if (library and re.match(library, ds)) or (volume and re.match(volume, vol)) or (sms and sms == vol):
- operOut = operOut + "{0} {1}\n".format(vol, ds)
+ ds_list = ds_list + "{0} {1}\n".format(vol, ds)
except re.error:
- result['stdout'] = operOut
module.exit_json(**result)
- result['stdout'] = operOut
+ result['stdout'] = ds_list
+ else:
+ """
+ ZOAU 1.3 changed the output from apf, having the data set list inside a new "data" tag.
+ To keep consistency with previous ZOAU versions now we have to filter the json response.
+ """
+ try:
+ result['stdout'] = json.dumps(data.get("data"))
+ except Exception as e:
+ err_msg = "An exception occurred. See stderr for more details."
+ module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr)
module.exit_json(**result)
diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py
index 05bbfc935..aca95e2f7 100644
--- a/plugins/modules/zos_archive.py
+++ b/plugins/modules/zos_archive.py
@@ -81,8 +81,8 @@
type: str
required: false
choices:
- - PACK
- - SPACK
+ - pack
+ - spack
xmit_log_data_set:
description:
- Provide the name of a data set to store xmit log output.
@@ -193,9 +193,9 @@
- Organization of the destination
type: str
required: false
- default: SEQ
+ default: seq
choices:
- - SEQ
+ - seq
space_primary:
description:
- If the destination I(dest) data set does not exist , this sets the
@@ -214,28 +214,28 @@
description:
- If the destination data set does not exist, this sets the unit of
measurement to use when defining primary and secondary space.
- - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK).
+ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk).
type: str
choices:
- - K
- - M
- - G
- - CYL
- - TRK
+ - k
+ - m
+ - g
+ - cyl
+ - trk
required: false
record_format:
description:
- If the destination data set does not exist, this sets the format of
the
data set. (e.g C(FB))
- - Choices are case-insensitive.
+ - Choices are case-sensitive.
required: false
choices:
- - FB
- - VB
- - FBA
- - VBA
- - U
+ - fb
+ - vb
+ - fba
+ - vba
+ - u
type: str
record_length:
description:
@@ -356,8 +356,8 @@
format:
name: terse
format_options:
- terse_pack: "SPACK"
- use_adrdssu: True
+ terse_pack: "spack"
+ use_adrdssu: true
# Use a pattern to store
- name: Compress data set pattern using xmit
@@ -424,7 +424,7 @@
mvs_cmd,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
import os
import tarfile
@@ -433,13 +433,14 @@
import glob
import re
import math
+import traceback
from hashlib import sha256
try:
from zoautil_py import datasets
except Exception:
- Datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
XMIT_RECORD_LENGTH = 80
AMATERSE_RECORD_LENGTH = 1024
@@ -789,24 +790,22 @@ def _create_dest_data_set(
if tmp_hlq:
hlq = tmp_hlq
else:
- rc, hlq, err = self.module.run_command("hlq")
- hlq = hlq.replace('\n', '')
- cmd = "mvstmphelper {0}.DZIP".format(hlq)
- rc, temp_ds, err = self.module.run_command(cmd)
- arguments.update(name=temp_ds.replace('\n', ''))
+ hlq = datasets.get_hlq()
+ temp_ds = datasets.tmp_name(high_level_qualifier=hlq)
+ arguments.update(name=temp_ds)
if record_format is None:
- arguments.update(record_format="FB")
+ arguments.update(record_format="fb")
if record_length is None:
arguments.update(record_length=80)
if type is None:
- arguments.update(type="SEQ")
+ arguments.update(type="seq")
if space_primary is None:
arguments.update(space_primary=5)
if space_secondary is None:
arguments.update(space_secondary=3)
if space_type is None:
- arguments.update(space_type="M")
+ arguments.update(space_type="m")
arguments.pop("self")
changed = data_set.DataSet.ensure_present(**arguments)
return arguments["name"], changed
@@ -820,8 +819,8 @@ def create_dest_ds(self, name):
name {str} - name of the newly created data set.
"""
record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH
- data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length)
- # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length)
+ data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length)
+ # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length)
# cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name)
# rc, out, err = self.module.run_command(cmd)
@@ -902,8 +901,8 @@ def expand_mvs_paths(self, paths):
expanded_path = []
for path in paths:
if '*' in path:
- e_paths = datasets.listing(path)
- e_paths = [path.name for path in e_paths]
+ # list_dataset_names returns a list of data set names or an empty list.
+ e_paths = datasets.list_dataset_names(path)
else:
e_paths = [path]
expanded_path.extend(e_paths)
@@ -946,22 +945,26 @@ def compute_dest_size(self):
{int} - Destination computed space in kilobytes.
"""
if self.dest_data_set.get("space_primary") is None:
- dest_space = 0
+ dest_space = 1
for target in self.targets:
- data_sets = datasets.listing(target)
+ data_sets = datasets.list_datasets(target)
for ds in data_sets:
- dest_space += int(ds.to_dict().get("total_space"))
+ dest_space += int(ds.total_space)
# space unit returned from listings is bytes
dest_space = math.ceil(dest_space / 1024)
- self.dest_data_set.update(space_primary=dest_space, space_type="K")
+ self.dest_data_set.update(space_primary=dest_space, space_type="k")
class AMATerseArchive(MVSArchive):
def __init__(self, module):
super(AMATerseArchive, self).__init__(module)
self.pack_arg = module.params.get("format").get("format_options").get("terse_pack")
+ # We store pack_arg in uppercase because the AMATERSE command requires
+ # it in uppercase.
if self.pack_arg is None:
self.pack_arg = "SPACK"
+ else:
+ self.pack_arg = self.pack_arg.upper()
def add(self, src, archive):
"""
@@ -988,8 +991,8 @@ def archive_targets(self):
"""
if self.use_adrdssu:
source, changed = self._create_dest_data_set(
- type="SEQ",
- record_format="U",
+ type="seq",
+ record_format="u",
record_length=0,
tmp_hlq=self.tmphlq,
replace=True,
@@ -1007,8 +1010,8 @@ def archive_targets(self):
dest, changed = self._create_dest_data_set(
name=self.dest,
replace=True,
- type='SEQ',
- record_format='FB',
+ type='seq',
+ record_format='fb',
record_length=AMATERSE_RECORD_LENGTH,
space_primary=self.dest_data_set.get("space_primary"),
space_type=self.dest_data_set.get("space_type"))
@@ -1057,8 +1060,8 @@ def archive_targets(self):
"""
if self.use_adrdssu:
source, changed = self._create_dest_data_set(
- type="SEQ",
- record_format="U",
+ type="seq",
+ record_format="u",
record_length=0,
tmp_hlq=self.tmphlq,
replace=True,
@@ -1076,8 +1079,8 @@ def archive_targets(self):
dest, changed = self._create_dest_data_set(
name=self.dest,
replace=True,
- type='SEQ',
- record_format='FB',
+ type='seq',
+ record_format='fb',
record_length=XMIT_RECORD_LENGTH,
space_primary=self.dest_data_set.get("space_primary"),
space_type=self.dest_data_set.get("space_type"))
@@ -1138,7 +1141,7 @@ def run_module():
options=dict(
terse_pack=dict(
type='str',
- choices=['PACK', 'SPACK'],
+ choices=['pack', 'spack'],
),
xmit_log_data_set=dict(
type='str',
@@ -1164,9 +1167,9 @@ def run_module():
),
type=dict(
type='str',
- choices=['SEQ'],
+ choices=['seq'],
required=False,
- default="SEQ",
+ default="seq",
),
space_primary=dict(
type='int', required=False),
@@ -1174,12 +1177,12 @@ def run_module():
type='int', required=False),
space_type=dict(
type='str',
- choices=['K', 'M', 'G', 'CYL', 'TRK'],
+ choices=['k', 'm', 'g', 'cyl', 'trk'],
required=False,
),
record_format=dict(
type='str',
- choices=["FB", "VB", "FBA", "VBA", "U"],
+ choices=["fb", "vb", "fba", "vba", "u"],
required=False
),
record_length=dict(type='int', required=False),
@@ -1215,7 +1218,7 @@ def run_module():
terse_pack=dict(
type='str',
required=False,
- choices=['PACK', 'SPACK'],
+ choices=['pack', 'spack'],
),
xmit_log_data_set=dict(
type='str',
@@ -1227,7 +1230,7 @@ def run_module():
)
),
default=dict(
- terse_pack="SPACK",
+ terse_pack="spack",
xmit_log_data_set="",
use_adrdssu=False),
),
@@ -1235,7 +1238,7 @@ def run_module():
default=dict(
name="",
format_options=dict(
- terse_pack="SPACK",
+ terse_pack="spack",
xmit_log_data_set="",
use_adrdssu=False
)
@@ -1250,7 +1253,7 @@ def run_module():
required=False,
options=dict(
name=dict(arg_type='str', required=False),
- type=dict(arg_type='str', required=False, default="SEQ"),
+ type=dict(arg_type='str', required=False, default="seq"),
space_primary=dict(arg_type='int', required=False),
space_secondary=dict(
arg_type='int', required=False),
diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py
index 080c7efab..7ac50b550 100644
--- a/plugins/modules/zos_backup_restore.py
+++ b/plugins/modules/zos_backup_restore.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -168,15 +168,15 @@
space_type:
description:
- The unit of measurement to use when defining data set space.
- - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK).
- - When I(full_volume=True), I(space_type) defaults to C(G), otherwise default is C(M)
+ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk).
+ - When I(full_volume=True), I(space_type) defaults to C(g), otherwise default is C(m)
type: str
choices:
- - K
- - M
- - G
- - CYL
- - TRK
+ - k
+ - m
+ - g
+ - cyl
+ - trk
required: false
aliases:
- unit
@@ -186,6 +186,14 @@
- Defaults to running user's username.
type: str
required: false
+ tmp_hlq:
+ description:
+ - Override the default high level qualifier (HLQ) for temporary and backup
+ data sets.
+ - The default HLQ is the Ansible user that executes the module and if
+ that is not available, then the value of C(TMPHLQ) is used.
+ required: false
+ type: str
"""
RETURN = r""""""
@@ -215,7 +223,7 @@
data_sets:
include: user.**
backup_name: /tmp/temp_backup.dzp
- recover: yes
+ recover: true
- name: Backup all datasets matching the pattern USER.** to data set MY.BACKUP.DZP,
allocate 100MB for data sets used in backup process.
@@ -225,7 +233,7 @@
include: user.**
backup_name: MY.BACKUP.DZP
space: 100
- space_type: M
+ space_type: m
- name:
Backup all datasets matching the pattern USER.** that are present on the volume MYVOL1 to data set MY.BACKUP.DZP,
@@ -237,7 +245,7 @@
volume: MYVOL1
backup_name: MY.BACKUP.DZP
space: 100
- space_type: M
+ space_type: m
- name: Backup an entire volume, MYVOL1, to the UNIX file /tmp/temp_backup.dzp,
allocate 1GB for data sets used in backup process.
@@ -245,9 +253,9 @@
operation: backup
backup_name: /tmp/temp_backup.dzp
volume: MYVOL1
- full_volume: yes
+ full_volume: true
space: 1
- space_type: G
+ space_type: g
- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp.
Use z/OS username as new HLQ.
@@ -288,10 +296,10 @@
zos_backup_restore:
operation: restore
volume: MYVOL2
- full_volume: yes
+ full_volume: true
backup_name: MY.BACKUP.DZP
space: 1
- space_type: G
+ space_type: g
- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp.
Specify DB2SMS10 for the SMS storage and management classes to use for the restored
@@ -312,15 +320,16 @@
from re import match, search, IGNORECASE
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
from os import path
-
+import traceback
try:
- from zoautil_py import datasets, exceptions
+ from zoautil_py import datasets
+ from zoautil_py import exceptions as zoau_exceptions
except ImportError:
- datasets = MissingZOAUImport()
- exceptions = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
+ zoau_exceptions = ZOAUImportError(traceback.format_exc())
def main():
@@ -337,7 +346,7 @@ def main():
),
),
space=dict(type="int", required=False, aliases=["size"]),
- space_type=dict(type="str", required=False, aliases=["unit"], choices=["K", "M", "G", "CYL", "TRK"]),
+ space_type=dict(type="str", required=False, aliases=["unit"], choices=["k", "m", "g", "cyl", "trk"]),
volume=dict(type="str", required=False),
full_volume=dict(type="bool", default=False),
temp_volume=dict(type="str", required=False, aliases=["dest_volume"]),
@@ -347,6 +356,7 @@ def main():
sms_storage_class=dict(type="str", required=False),
sms_management_class=dict(type="str", required=False),
hlq=dict(type="str", required=False),
+ tmp_hlq=dict(type="str", required=False),
)
module = AnsibleModule(argument_spec=module_args, supports_check_mode=False)
@@ -365,6 +375,7 @@ def main():
sms_storage_class = params.get("sms_storage_class")
sms_management_class = params.get("sms_management_class")
hlq = params.get("hlq")
+ tmp_hlq = params.get("tmp_hlq")
if operation == "backup":
backup(
@@ -380,6 +391,7 @@ def main():
space_type=space_type,
sms_storage_class=sms_storage_class,
sms_management_class=sms_management_class,
+ tmp_hlq=tmp_hlq,
)
else:
restore(
@@ -396,6 +408,7 @@ def main():
space_type=space_type,
sms_storage_class=sms_storage_class,
sms_management_class=sms_management_class,
+ tmp_hlq=tmp_hlq,
)
result["changed"] = True
@@ -444,6 +457,7 @@ def parse_and_validate_args(params):
sms_storage_class=dict(type=sms_type, required=False),
sms_management_class=dict(type=sms_type, required=False),
hlq=dict(type=hlq_type, default=hlq_default, dependencies=["operation"]),
+ tmp_hlq=dict(type=hlq_type, required=False),
)
parsed_args = BetterArgParser(arg_defs).parse_args(params)
@@ -466,6 +480,7 @@ def backup(
space_type,
sms_storage_class,
sms_management_class,
+ tmp_hlq,
):
"""Backup data sets or a volume to a new data set or unix file.
@@ -482,10 +497,11 @@ def backup(
space_type (str): The unit of measurement to use when defining data set space.
sms_storage_class (str): Specifies the storage class to use.
sms_management_class (str): Specifies the management class to use.
+ tmp_hlq (str): Specifies the temporary HLQ to use for temporary data sets.
"""
args = locals()
zoau_args = to_dzip_args(**args)
- datasets.zip(**zoau_args)
+ datasets.dzip(**zoau_args)
def restore(
@@ -502,6 +518,7 @@ def restore(
space_type,
sms_storage_class,
sms_management_class,
+ tmp_hlq,
):
"""[summary]
@@ -523,23 +540,26 @@ def restore(
space_type (str): The unit of measurement to use when defining data set space.
sms_storage_class (str): Specifies the storage class to use.
sms_management_class (str): Specifies the management class to use.
+ tmp_hlq (str): Specifies the temporary HLQ to use for temporary data sets.
"""
args = locals()
zoau_args = to_dunzip_args(**args)
- response = datasets._unzip(**zoau_args)
+ output = ""
+ try:
+ rc = datasets.dunzip(**zoau_args)
+ except zoau_exceptions.ZOAUException as dunzip_exception:
+ output = dunzip_exception.response.stdout_response
+ output = output + dunzip_exception.response.stderr_response
+ rc = get_real_rc(output)
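+ # A return code of 4 or less from dunzip is a warning that can be
+ # tolerated when recover=true; anything above 4 is always a failure.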
failed = False
- true_rc = response.rc
- if response.rc > 0:
- output = response.stdout_response + response.stderr_response
- true_rc = get_real_rc(output) or true_rc
- if true_rc > 0 and true_rc <= 4:
+ if rc > 0 and rc <= 4:
if recover is not True:
failed = True
- elif true_rc > 0:
+ elif rc > 4:
failed = True
if failed:
- raise exceptions.ZOAUException(
- "%s,RC=%s" % (response.stderr_response, response.rc)
+ raise zoau_exceptions.ZOAUException(
+ "{0}, RC={1}".format(output, rc)
)
@@ -631,7 +651,7 @@ def hlq_default(contents, dependencies):
"""
hlq = None
if dependencies.get("operation") == "restore":
- hlq = datasets.hlq()
+ hlq = datasets.get_hlq()
return hlq
@@ -689,12 +709,12 @@ def space_type_type(contents, dependencies):
"""
if contents is None:
if dependencies.get("full_volume"):
- return "G"
+ return "g"
else:
- return "M"
- if not match(r"^(M|G|K|TRK|CYL)$", contents, IGNORECASE):
+ return "m"
+ if not match(r"^(m|g|k|trk|cyl)$", contents, IGNORECASE):
raise ValueError(
- 'Value {0} is invalid for space_type argument. Valid space types are "K", "M", "G", "TRK" or "CYL".'.format(
+ 'Value {0} is invalid for space_type argument. Valid space types are "k", "m", "g", "trk" or "cyl".'.format(
contents
)
)
@@ -791,6 +811,10 @@ def to_dzip_args(**kwargs):
if kwargs.get("space_type"):
size += kwargs.get("space_type")
zoau_args["size"] = size
+
+ if kwargs.get("tmp_hlq"):
+ zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq"))
+
return zoau_args
@@ -844,7 +868,10 @@ def to_dunzip_args(**kwargs):
zoau_args["size"] = size
if kwargs.get("hlq"):
- zoau_args["hlq"] = kwargs.get("hlq")
+ zoau_args["high_level_qualifier"] = kwargs.get("hlq")
+
+ if kwargs.get("tmp_hlq"):
+ zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq"))
return zoau_args
diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py
index 7a2adf7cc..232b711db 100644
--- a/plugins/modules/zos_blockinfile.py
+++ b/plugins/modules/zos_blockinfile.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020 - 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -325,17 +325,18 @@
"""
import json
+import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
better_arg_parser, data_set, backup as Backup)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
try:
from zoautil_py import datasets
except Exception:
- Datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
# supported data set types
@@ -379,14 +380,15 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force):
- BOF
- '*regex*'
encoding: {str} -- Encoding of the src.
- force: {str} -- If not empty passes the -f option to dmod cmd.
+ force: {bool} -- If True, enables the force option of the dmod command.
Returns:
str -- Information in JSON format. keys:
cmd: {str} -- dmod shell command
found: {int} -- Number of matching regex pattern
changed: {bool} -- Indicates if the destination was modified.
"""
- return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, options=force, as_json=True)
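+ # ZOAU 1.3 renamed the blockinfile keyword arguments (ins_aft -> insert_after,
+ # ins_bef -> insert_before) and replaced state/options with a positional state
+ # flag and a force keyword.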
+ return datasets.blockinfile(src, True, block=block, marker=marker, insert_after=ins_aft,
+ insert_before=ins_bef, encoding=encoding, force=force, as_json=True)
def absent(src, marker, encoding, force):
@@ -395,14 +397,14 @@ def absent(src, marker, encoding, force):
src: {str} -- The z/OS USS file or data set to modify.
marker: {str} -- Identifies the block to be removed.
encoding: {str} -- Encoding of the src.
- force: {str} -- If not empty passes the -f option to dmod cmd.
+ force: {bool} -- If True, enables the force option of the dmod command.
Returns:
str -- Information in JSON format. keys:
cmd: {str} -- dmod shell command
found: {int} -- Number of matching regex pattern
changed: {bool} -- Indicates if the destination was modified.
"""
- return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, options=force, as_json=True)
+ return datasets.blockinfile(src, False, marker=marker, encoding=encoding, force=force, as_json=True)
def quotedString(string):
@@ -412,10 +414,71 @@ def quotedString(string):
return string.replace('"', "")
-def quoted_string_output_json(string):
+def quotedString_double_quotes(string):
+ # Escape double quotes in the string.
if not isinstance(string, str):
return string
- return string.replace('"', "u'")
+ return string.replace('"', '\\"')
+
+
+def check_double_quotes(marker, ins_bef, ins_aft, block):
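+ # Returns True if any of the dmod inputs contains a literal double quote.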
+ if marker:
+ if '"' in marker:
+ return True
+ if ins_bef:
+ if '"' in ins_bef:
+ return True
+ if ins_aft:
+ if '"' in ins_aft:
+ return True
+ if block:
+ if '"' in block:
+ return True
+ return False
+
+
+def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=None, ins_aft=None):
+ block = block.replace('"', '\\"')
+ force = "-f" if force else ""
+ encoding = "-c {0}".format(encoding) if encoding else ""
+ marker = "-m \"{0}\"".format(marker) if marker else ""
+ if state:
+ if ins_aft:
+ if ins_aft == "EOF":
+ opts = f'"$ a\\{block}" "{src}"'
+ else:
+ opts = f'-s -e "/{ins_aft}/a\\{block}/$" -e "$ a\\{block}" "{src}"'
+ elif ins_bef:
+ if ins_bef == "BOF":
+ opts = f' "1 i\\{block}" "{src}" '
+ else:
+ opts = f'-s -e "/{ins_bef}/i\\{block}/$" -e "$ a\\{block}" "{src}"'
+
+ cmd = "dmod -b {0} {1} {2} {3}".format(force, encoding, marker, opts)
+ else:
+ cmd = """dmod -b {0} {1} {2} {3}""".format(force, encoding, marker, src)
+
+ rc, stdout, stderr = module.run_command(cmd)
+ cmd = clean_command(cmd)
+ return rc, cmd
+
+
+def clean_command(cmd):
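+ # Strip the sed-style control sequences that dmod embeds so the command
+ # string returned in the module output is readable.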
+ cmd = cmd.replace('/c\\\\', '')
+ cmd = cmd.replace('/a\\\\', '', )
+ cmd = cmd.replace('/i\\\\', '', )
+ cmd = cmd.replace('$ a\\\\', '', )
+ cmd = cmd.replace('1 i\\\\', '', )
+ cmd = cmd.replace('/c\\', '')
+ cmd = cmd.replace('/a\\', '')
+ cmd = cmd.replace('/i\\', '')
+ cmd = cmd.replace('$ a\\', '')
+ cmd = cmd.replace('1 i\\', '')
+ cmd = cmd.replace('/d', '')
+ cmd = cmd.replace('\\\\d', '')
+ cmd = cmd.replace('\\n', '\n')
+ cmd = cmd.replace('\\"', '"')
+ return cmd
def main():
@@ -540,7 +603,6 @@ def main():
marker_begin = 'BEGIN'
if not marker_end:
marker_end = 'END'
- force = '-f' if force else ''
marker = "{0}\\n{1}\\n{2}".format(marker_begin, marker_end, marker)
block = transformBlock(block, ' ', indentation)
@@ -558,6 +620,7 @@ def main():
module.fail_json(msg=message)
file_type = 0
+ return_content = None
if backup:
# backup can be True(bool) or none-zero length string. string indicates that backup_name was provided.
# setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use
@@ -571,45 +634,52 @@ def main():
result['backup_name'] = Backup.mvs_file_backup(dsn=src, bk_dsn=backup, tmphlq=tmphlq)
except Exception as err:
module.fail_json(msg="Unable to allocate backup {0} destination: {1}".format(backup, str(err)))
+ double_quotes_exists = check_double_quotes(marker, ins_bef, ins_aft, block)
# state=present, insert/replace a block with matching regex pattern
# state=absent, delete blocks with matching regex pattern
if parsed_args.get('state') == 'present':
- return_content = present(src, block, quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force)
+ if double_quotes_exists:
+ rc, cmd = execute_dmod(src, block, quotedString_double_quotes(marker), force, encoding, True, module=module,
+ ins_bef=quotedString_double_quotes(ins_bef), ins_aft=quotedString_double_quotes(ins_aft))
+ result['rc'] = rc
+ result['cmd'] = cmd
+ result['changed'] = True if rc == 0 else False
+ stderr = 'Failed to insert new entry' if rc != 0 else ""
+ else:
+ return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force)
else:
- return_content = absent(src, quotedString(marker), encoding, force)
- stdout = return_content.stdout_response
- stderr = return_content.stderr_response
- rc = return_content.rc
- try:
- # change the return string to be loadable by json.loads()
- stdout = stdout.replace('/c\\', '/c\\\\')
- stdout = stdout.replace('/a\\', '/a\\\\')
- stdout = stdout.replace('/i\\', '/i\\\\')
- stdout = stdout.replace('$ a\\', '$ a\\\\')
- stdout = stdout.replace('1 i\\', '1 i\\\\')
- if block:
- stdout = stdout.replace(block, quoted_string_output_json(block))
- if ins_aft:
- stdout = stdout.replace(ins_aft, quoted_string_output_json(ins_aft))
- if ins_bef:
- stdout = stdout.replace(ins_bef, quoted_string_output_json(ins_bef))
- # Try to extract information from stdout
- ret = json.loads(stdout)
- ret['cmd'] = ret['cmd'].replace("u'", '"')
-
- result['cmd'] = ret['cmd']
- result['changed'] = ret['changed']
- result['found'] = ret['found']
- # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case
- # That information will be given with 'changed' and 'found'
- if len(stderr):
- result['stderr'] = str(stderr)
+ if double_quotes_exists:
+ rc, cmd = execute_dmod(src, block, quotedString_double_quotes(marker), force, encoding, False, module=module)
result['rc'] = rc
- except Exception:
- messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc)
- if result.get('backup_name'):
- messageDict['backup_name'] = result['backup_name']
- module.fail_json(**messageDict)
+ result['cmd'] = cmd
+ result['changed'] = True if rc == 0 else False
+ stderr = 'Failed to remove entry' if rc != 0 else ""
+ else:
+ return_content = absent(src, marker, encoding, force)
+ # ZOAU 1.3.0 generates false positives when working with double quotes (");
+ # the call returns distinct content depending on whether they are present.
+ if not double_quotes_exists:
+ stdout = return_content.stdout_response
+ stderr = return_content.stderr_response
+ rc = return_content.rc
+ stdout = stdout.replace('/d', '\\\\d')
+ try:
+ # Try to extract information from stdout.
+ # The triple double quotes are required so special characters (/_) stay escaped.
+ ret = json.loads("""{0}""".format(stdout))
+ except Exception:
+ messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc)
+ if result.get('backup_name'):
+ messageDict['backup_name'] = result['backup_name']
+ module.fail_json(**messageDict)
+
+ result['cmd'] = ret['data']['commands']
+ result['changed'] = ret['data']['changed']
+ result['found'] = ret['data']['found']
+ # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case
+ # That information will be given with 'changed' and 'found'
+ if len(stderr):
+ result['stderr'] = str(stderr)
+ result['rc'] = rc
module.exit_json(**result)
diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py
index b2eda28f5..075162e69 100644
--- a/plugins/modules/zos_copy.py
+++ b/plugins/modules/zos_copy.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019 - 2024
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -97,7 +97,7 @@
- If C(dest) is a new USS file or replacement, the file will be appropriately tagged with
either the system's default locale or the encoding option defined. If the USS file is
a replacement, the user must have write authority to the file either through ownership,
- group or other permissions, else the copy will fail.
+ group or other permissions, else the module will fail.
- If C(dest) is a nonexistent data set, it will be created following the
process outlined here and in the C(volume) option.
- If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of
@@ -118,7 +118,7 @@
be deleted and recreated following the process outlined in the C(volume) option.
- When the C(dest) is an existing VSAM (RRDS), then the source must be an RRDS.
The VSAM (RRDS) will be deleted and recreated following the process outlined
- in the C(volume) option.
+ in the C(volume) option.
- When C(dest) is an existing VSAM (LDS), then source must be an LDS. The
VSAM (LDS) will be deleted and recreated following the process outlined
in the C(volume) option.
@@ -147,7 +147,7 @@
to:
description:
- The encoding to be converted to
- required: true
+ required: false
type: str
tmp_hlq:
description:
@@ -247,6 +247,15 @@
type: bool
default: true
required: false
+ group:
+ description:
+ - Name of the group that will own the file system objects.
+ - When left unspecified, it uses the current group of the current user
+ unless you are root, in which case it can preserve the previous
+ ownership.
+ - This option is only applicable if C(dest) is USS, otherwise ignored.
+ type: str
+ required: false
mode:
description:
- The permission of the destination file or directory.
@@ -265,6 +274,15 @@
the source file.
type: str
required: false
+ owner:
+ description:
+ - Name of the user that should own the filesystem object, as would be
+ passed to the chown command.
+ - When left unspecified, it uses the current user unless you are root,
+ in which case it can preserve the previous ownership.
+ - This option is only applicable if C(dest) is USS, otherwise ignored.
+ type: str
+ required: false
remote_src:
description:
- If set to C(false), the module searches for C(src) at the local machine.
@@ -329,16 +347,16 @@
type: str
required: true
choices:
- - KSDS
- - ESDS
- - RRDS
- - LDS
- - SEQ
- - PDS
- - PDSE
- - MEMBER
- - BASIC
- - LIBRARY
+ - ksds
+ - esds
+ - rrds
+ - lds
+ - seq
+ - pds
+ - pdse
+ - member
+ - basic
+ - library
space_primary:
description:
- If the destination I(dest) data set does not exist , this sets the
@@ -357,27 +375,27 @@
description:
- If the destination data set does not exist, this sets the unit of
measurement to use when defining primary and secondary space.
- - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK).
+ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk).
type: str
choices:
- - K
- - M
- - G
- - CYL
- - TRK
+ - k
+ - m
+ - g
+ - cyl
+ - trk
required: false
record_format:
description:
- If the destination data set does not exist, this sets the format of the
- data set. (e.g C(FB))
- - Choices are case-insensitive.
+ data set. (e.g. C(fb))
+ - Choices are case-sensitive.
required: false
choices:
- - FB
- - VB
- - FBA
- - VBA
- - U
+ - fb
+ - vb
+ - fba
+ - vba
+ - u
type: str
record_length:
description:
@@ -399,15 +417,15 @@
key_offset:
description:
- The key offset to use when creating a KSDS data set.
- - I(key_offset) is required when I(type=KSDS).
- - I(key_offset) should only be provided when I(type=KSDS)
+ - I(key_offset) is required when I(type=ksds).
+ - I(key_offset) should only be provided when I(type=ksds)
type: int
required: false
key_length:
description:
- The key length to use when creating a KSDS data set.
- - I(key_length) is required when I(type=KSDS).
- - I(key_length) should only be provided when I(type=KSDS)
+ - I(key_length) is required when I(type=ksds).
+ - I(key_length) should only be provided when I(type=ksds)
type: int
required: false
sms_storage_class:
@@ -494,7 +512,7 @@
zos_copy:
src: /path/to/foo.conf
dest: /etc/foo.conf
- mode: 0644
+ mode: "0644"
group: foo
owner: bar
@@ -624,11 +642,11 @@
remote_src: true
volume: '222222'
dest_data_set:
- type: SEQ
+ type: seq
space_primary: 10
space_secondary: 3
- space_type: K
- record_format: VB
+ space_type: k
+ record_format: vb
record_length: 150
- name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL
@@ -684,7 +702,7 @@
description:
Record format of the dataset.
type: str
- sample: FB
+ sample: fb
record_length:
description:
Record length of the dataset.
@@ -704,21 +722,21 @@
description:
Unit of measurement for space.
type: str
- sample: K
+ sample: k
type:
description:
Type of dataset allocated.
type: str
- sample: PDSE
+ sample: pdse
sample:
{
"block_size": 32760,
- "record_format": "FB",
+ "record_format": "fb",
"record_length": 45,
"space_primary": 2,
"space_secondary": 1,
- "space_type": "K",
- "type": "PDSE"
+ "space_type": "k",
+ "type": "pdse"
}
checksum:
description: SHA256 checksum of the file after running zos_copy.
@@ -808,43 +826,48 @@
"""
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
-)
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import (
- idcams
-)
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
- better_arg_parser, data_set, encode, backup, copy, validation,
-)
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import (
- AnsibleModuleHelper,
-)
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import (
- is_member
-)
-from ansible.module_utils._text import to_bytes, to_native
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six import PY3
-from re import IGNORECASE
-from hashlib import sha256
import glob
+import math
+import os
import shutil
import stat
-import math
import tempfile
-import os
+import traceback
+from hashlib import sha256
+from re import IGNORECASE
+
+from ansible.module_utils._text import to_bytes, to_native
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six import PY3
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
+ backup, better_arg_parser, copy, data_set, encode, validation)
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import \
+ AnsibleModuleHelper
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import (
+ is_member,
+ is_data_set
+)
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \
+ ZOAUImportError
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import \
+ idcams
if PY3:
- from re import fullmatch
import pathlib
+ from re import fullmatch
else:
from re import match as fullmatch
try:
from zoautil_py import datasets, opercmd
except Exception:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
+ opercmd = ZOAUImportError(traceback.format_exc())
+
+try:
+ from zoautil_py import exceptions as zoau_exceptions
+except ImportError:
+ zoau_exceptions = ZOAUImportError(traceback.format_exc())
class CopyHandler(object):
@@ -890,7 +913,6 @@ def run_command(self, cmd, **kwargs):
def copy_to_seq(
self,
src,
- temp_path,
conv_path,
dest,
src_type
@@ -902,18 +924,24 @@ def copy_to_seq(
Arguments:
src {str} -- Path to USS file or data set name
- temp_path {str} -- Path to the location where the control node
- transferred data to
conv_path {str} -- Path to the converted source file
dest {str} -- Name of destination data set
src_type {str} -- Type of the source
"""
- new_src = conv_path or temp_path or src
+ new_src = conv_path or src
copy_args = dict()
copy_args["options"] = ""
if src_type == 'USS' and self.asa_text:
response = copy.copy_asa_uss2mvs(new_src, dest)
+
+ if response.rc != 0:
+ raise CopyOperationError(
+ msg="Unable to copy source {0} to {1}".format(new_src, dest),
+ rc=response.rc,
+ stdout=response.stdout_response,
+ stderr=response.stderr_response
+ )
else:
# While ASA files are just text files, we do a binary copy
# so dcp doesn't introduce any additional blanks or newlines.
@@ -923,14 +951,15 @@ def copy_to_seq(
if self.force_lock:
copy_args["options"] += " -f"
- response = datasets._copy(new_src, dest, None, **copy_args)
- if response.rc != 0:
- raise CopyOperationError(
- msg="Unable to copy source {0} to {1}".format(new_src, dest),
- rc=response.rc,
- stdout=response.stdout_response,
- stderr=response.stderr_response
- )
+ try:
+ datasets.copy(new_src, dest, **copy_args)
+ except zoau_exceptions.ZOAUException as copy_exception:
+ raise CopyOperationError(
+ msg="Unable to copy source {0} to {1}".format(new_src, dest),
+ rc=copy_exception.response.rc,
+ stdout=copy_exception.response.stdout_response,
+ stderr=copy_exception.response.stderr_response
+ )
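A sketch of the ZOAU 1.3 error-handling contract this change relies on: datasets.copy() raises on failure instead of returning a failing response object, and the exception wraps the old response fields (names taken from the hunk; the paths are hypothetical):

from zoautil_py import datasets, exceptions as zoau_exceptions  # requires ZOAU 1.3+

try:
    rc = datasets.copy("/tmp/sample.txt", "IBMUSER.TEST.SEQ")
except zoau_exceptions.ZOAUException as err:
    # err.response carries rc/stdout/stderr, mirroring the removed response object
    print(err.response.rc, err.response.stderr_response)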
def copy_to_vsam(self, src, dest):
"""Copy source VSAM to destination VSAM.
@@ -993,9 +1022,11 @@ def _copy_tree(self, entries, src, dest, dirs_exist_ok=False):
else:
opts = dict()
opts["options"] = ""
- response = datasets._copy(src_name, dest_name, None, **opts)
- if response.rc > 0:
- raise Exception(response.stderr_response)
+
+ try:
+ datasets.copy(src_name, dest_name, **opts)
+ except zoau_exceptions.ZOAUException as copy_exception:
+ raise Exception(copy_exception.response.stderr_response)
shutil.copystat(src_name, dest_name, follow_symlinks=True)
except Exception as err:
raise err
@@ -1018,15 +1049,15 @@ def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False):
entries = list(itr)
return self._copy_tree(entries, src_dir, dest_dir, dirs_exist_ok=dirs_exist_ok)
- def convert_encoding(self, src, temp_path, encoding):
+ def convert_encoding(self, src, encoding, remote_src):
"""Convert encoding for given src
Arguments:
src {str} -- Path to the USS source file or directory
- temp_path {str} -- Path to the location where the control node
- transferred data to
encoding {dict} -- Charsets that the source is to be converted
from and to
+ remote_src {bool} -- Whether the file was already on the remote
+ node or not.
Raises:
CopyOperationError -- When the encoding of a USS file is not
@@ -1038,19 +1069,10 @@ def convert_encoding(self, src, temp_path, encoding):
from_code_set = encoding.get("from")
to_code_set = encoding.get("to")
enc_utils = encode.EncodeUtils()
- new_src = temp_path or src
-
+ new_src = src
if os.path.isdir(new_src):
- if temp_path:
- if src.endswith("/"):
- new_src = "{0}/{1}".format(
- temp_path, os.path.basename(os.path.dirname(src))
- )
- else:
- new_src = "{0}/{1}".format(temp_path,
- os.path.basename(src))
try:
- if not temp_path:
+ if remote_src:
temp_dir = tempfile.mkdtemp()
shutil.copytree(new_src, temp_dir, dirs_exist_ok=True)
new_src = temp_dir
@@ -1068,7 +1090,7 @@ def convert_encoding(self, src, temp_path, encoding):
raise CopyOperationError(msg=str(err))
else:
try:
- if not temp_path:
+ if remote_src:
fd, temp_src = tempfile.mkstemp()
os.close(fd)
shutil.copy(new_src, temp_src)
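The staging rule above, in isolation: a remote source is first copied to scratch space so conversion never rewrites the original, while a file transferred by Ansible is converted in place (a hedged sketch; the helper name is invented):

import os
import shutil
import tempfile

def stage_for_conversion(path, remote_src):
    # Only sources already on the managed node need a scratch copy.
    if not remote_src:
        return path
    fd, scratch = tempfile.mkstemp()
    os.close(fd)
    shutil.copy(path, scratch)
    return scratch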
@@ -1257,24 +1279,23 @@ def copy_to_uss(
src,
dest,
conv_path,
- temp_path,
src_ds_type,
src_member,
member_name,
- force
+ force,
+ content_copy,
):
"""Copy a file or data set to a USS location
Arguments:
src {str} -- The USS source
dest {str} -- Destination file or directory on USS
- temp_path {str} -- Path to the location where the control node
- transferred data to
conv_path {str} -- Path to the converted source file or directory
src_ds_type {str} -- Type of source
src_member {bool} -- Whether src is a data set member
member_name {str} -- The name of the source data set member
force {bool} -- Whether to copy files to an already existing directory
+ content_copy {bool} -- Whether the copy is using the content option.
Returns:
{str} -- Destination where the file was copied to
@@ -1309,11 +1330,11 @@ def copy_to_uss(
if "File exists" not in err:
raise CopyOperationError(msg=to_native(err))
- if os.path.isfile(temp_path or conv_path or src):
- dest = self._copy_to_file(src, dest, conv_path, temp_path)
+ if os.path.isfile(conv_path or src):
+ dest = self._copy_to_file(src, dest, content_copy, conv_path)
changed_files = None
else:
- dest, changed_files = self._copy_to_dir(src, dest, conv_path, temp_path, force)
+ dest, changed_files = self._copy_to_dir(src, dest, conv_path, force)
if self.common_file_args is not None:
mode = self.common_file_args.get("mode")
@@ -1334,14 +1355,13 @@ def copy_to_uss(
self.module.set_owner_if_different(dest, owner, False)
return dest
- def _copy_to_file(self, src, dest, conv_path, temp_path):
+ def _copy_to_file(self, src, dest, content_copy, conv_path):
"""Helper function to copy a USS src to USS dest.
Arguments:
src {str} -- USS source file path
dest {str} -- USS dest file path
- temp_path {str} -- Path to the location where the control node
- transferred data to
+ content_copy {bool} -- Whether the copy is using the content option.
conv_path {str} -- Path to the converted source file or directory
Raises:
@@ -1350,25 +1370,27 @@ def _copy_to_file(self, src, dest, conv_path, temp_path):
Returns:
{str} -- Destination where the file was copied to
"""
- src_path = os.path.basename(src) if src else "inline_copy"
+ src_path = os.path.basename(src) if not content_copy else "inline_copy"
if os.path.isdir(dest):
dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path))
-
- new_src = temp_path or conv_path or src
+ new_src = conv_path or src
try:
if self.is_binary:
copy.copy_uss2uss_binary(new_src, dest)
else:
opts = dict()
opts["options"] = ""
- response = datasets._copy(new_src, dest, None, **opts)
- if response.rc > 0:
- raise Exception(response.stderr_response)
+ datasets.copy(new_src, dest, **opts)
shutil.copystat(new_src, dest, follow_symlinks=True)
# shutil.copy(new_src, dest)
if self.executable:
status = os.stat(dest)
os.chmod(dest, status.st_mode | stat.S_IEXEC)
+ except zoau_exceptions.ZOAUException as err:
+ raise CopyOperationError(
+ msg="Unable to copy file {0} to {1}".format(new_src, dest),
+ stderr=err.response.stderr_response,
+ )
except OSError as err:
raise CopyOperationError(
msg="Destination {0} is not writable".format(dest),
@@ -1386,7 +1408,6 @@ def _copy_to_dir(
src_dir,
dest_dir,
conv_path,
- temp_path,
force
):
"""Helper function to copy a USS directory to another USS directory.
@@ -1397,8 +1418,6 @@ def _copy_to_dir(
src_dir {str} -- USS source directory
dest_dir {str} -- USS dest directory
conv_path {str} -- Path to the converted source directory
- temp_path {str} -- Path to the location where the control node
- transferred data to
force {bool} -- Whether to copy files to an already existing directory
Raises:
@@ -1410,14 +1429,7 @@ def _copy_to_dir(
that got copied.
"""
copy_directory = True if not src_dir.endswith("/") else False
-
- if temp_path:
- temp_path = "{0}/{1}".format(
- temp_path,
- os.path.basename(os.path.normpath(src_dir))
- )
-
- new_src_dir = temp_path or conv_path or src_dir
+ new_src_dir = conv_path or src_dir
new_src_dir = os.path.normpath(new_src_dir)
dest = dest_dir
changed_files, original_permissions = self._get_changed_files(new_src_dir, dest_dir, copy_directory)
@@ -1554,12 +1566,21 @@ def _mvs_copy_to_uss(
if src_member or src_ds_type in data_set.DataSet.MVS_SEQ:
if self.asa_text:
response = copy.copy_asa_mvs2uss(src, dest)
+ rc = response.rc
elif self.executable:
- response = datasets._copy(src, dest, alias=True, executable=True)
+ try:
+ rc = datasets.copy(src, dest, alias=True, executable=True)
+ except zoau_exceptions.ZOAUException as copy_exception:
+ response = copy_exception.response
+ rc = response.rc
else:
- response = datasets._copy(src, dest)
+ try:
+ rc = datasets.copy(src, dest)
+ except zoau_exceptions.ZOAUException as copy_exception:
+ response = copy_exception.response
+ rc = response.rc
- if response.rc != 0:
+ if rc != 0:
raise CopyOperationError(
msg="Error while copying source {0} to {1}".format(src, dest),
rc=response.rc,
@@ -1568,14 +1589,14 @@ def _mvs_copy_to_uss(
)
else:
if self.executable:
- response = datasets._copy(src, dest, None, alias=True, executable=True)
-
- if response.rc != 0:
+ try:
+ datasets.copy(src, dest, alias=True, executable=True)
+ except zoau_exceptions.ZOAUException as copy_exception:
raise CopyOperationError(
msg="Error while copying source {0} to {1}".format(src, dest),
- rc=response.rc,
- stdout=response.stdout_response,
- stderr=response.stderr_response
+ rc=copy_exception.response.rc,
+ stdout=copy_exception.response.stdout_response,
+ stderr=copy_exception.response.stderr_response
)
elif self.asa_text:
response = copy.copy_asa_pds2uss(src, dest)
@@ -1636,7 +1657,6 @@ def __init__(
def copy_to_pdse(
self,
src,
- temp_path,
conv_path,
dest,
src_ds_type,
@@ -1651,8 +1671,6 @@ def copy_to_pdse(
Arguments:
src {str} -- Path to USS file/directory or data set name.
- temp_path {str} -- Path to the location where the control node
- transferred data to.
conv_path {str} -- Path to the converted source file/directory.
dest {str} -- Name of destination data set.
src_ds_type {str} -- The type of source.
@@ -1660,7 +1678,7 @@ def copy_to_pdse(
dest_member {str, optional} -- Name of destination member in data set.
encoding {dict, optional} -- Dictionary with encoding options.
"""
- new_src = conv_path or temp_path or src
+ new_src = conv_path or src
src_members = []
dest_members = []
@@ -1725,7 +1743,7 @@ def copy_to_pdse(
# Copy section
if src_ds_type == "USS" or self.asa_text or len(src_members) == 1:
"""
- USS -> MVS : Was kept on member by member basis bc file names longer that 8
+ USS -> MVS : Kept on a member-by-member basis because file names longer than 8
characters will throw an error when copying to a PDS, because of the member name
character limit.
MVS -> MVS (asa only): This has to be copied on member by member basis bc OPUT
@@ -1790,6 +1808,7 @@ def copy_to_member(
if src_type == 'USS' and self.asa_text:
response = copy.copy_asa_uss2mvs(src, dest)
+ rc, out, err = response.rc, response.stdout_response, response.stderr_response
else:
# While ASA files are just text files, we do a binary copy
# so dcp doesn't introduce any additional blanks or newlines.
@@ -1799,8 +1818,14 @@ def copy_to_member(
if self.force_lock:
opts["options"] += " -f"
- response = datasets._copy(src, dest, alias=self.aliases, executable=self.executable, **opts)
- rc, out, err = response.rc, response.stdout_response, response.stderr_response
+ try:
+ rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, **opts)
+ out = ""
+ err = ""
+ except zoau_exceptions.ZOAUException as copy_exception:
+ rc = copy_exception.response.rc
+ out = copy_exception.response.stdout_response
+ err = copy_exception.response.stderr_response
return dict(
rc=rc,
@@ -1857,8 +1882,8 @@ def dump_data_set_member_to_file(data_set_member, is_binary):
if is_binary:
copy_args["options"] = "-B"
- response = datasets._copy(data_set_member, temp_path, None, **copy_args)
- if response.rc != 0 or response.stderr_response:
+ rc = datasets.copy(data_set_member, temp_path, **copy_args)
+ if rc != 0:
raise DataSetMemberAttributeError(data_set_member)
return temp_path
@@ -2320,7 +2345,7 @@ def get_attributes_of_any_dataset_created(
volume=volume
)
else:
- src_attributes = datasets.listing(src_name)[0]
+ src_attributes = datasets.list_datasets(src_name)[0]
size = int(src_attributes.total_space)
params = get_data_set_attributes(
dest,
@@ -2402,8 +2427,8 @@ def allocate_destination_data_set(
try:
# Dumping the member into a file in USS to compute the record length and
# size for the new data set.
- src_attributes = datasets.listing(src_name)[0]
- record_length = int(src_attributes.lrecl)
+ src_attributes = datasets.list_datasets(src_name)[0]
+ record_length = int(src_attributes.record_length)
temp_dump = dump_data_set_member_to_file(src, is_binary)
create_seq_dataset_from_file(
temp_dump,
@@ -2422,11 +2447,11 @@ def allocate_destination_data_set(
if src_ds_type in data_set.DataSet.MVS_PARTITIONED:
data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume)
elif src_ds_type in data_set.DataSet.MVS_SEQ:
- src_attributes = datasets.listing(src_name)[0]
+ src_attributes = datasets.list_datasets(src_name)[0]
# The size returned by listing is in bytes.
size = int(src_attributes.total_space)
- record_format = src_attributes.recfm
- record_length = int(src_attributes.lrecl)
+ record_format = src_attributes.record_format
+ record_length = int(src_attributes.record_length)
dest_params = get_data_set_attributes(
dest,
size,
@@ -2512,8 +2537,8 @@ def allocate_destination_data_set(
asa_text,
volume
)
- dest_attributes = datasets.listing(dest)[0]
- record_format = dest_attributes.recfm
+ dest_attributes = datasets.list_datasets(dest)[0]
+ record_format = dest_attributes.record_format
dest_params["type"] = dest_ds_type
dest_params["record_format"] = record_format
return True, dest_params
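The listing API renames applied throughout this file, side by side (attribute names as used in the hunks; the data set name is hypothetical):

# ZOAU 1.2: datasets.listing(name)[0]       -> .recfm, .lrecl
# ZOAU 1.3: datasets.list_datasets(name)[0] -> .record_format, .record_length
attrs = datasets.list_datasets("IBMUSER.TEST.SEQ")[0]
record_format = attrs.record_format        # e.g. "FB"
record_length = int(attrs.record_length)
size = int(attrs.total_space)              # reported in bytes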
@@ -2628,15 +2653,10 @@ def run_module(module, arg_def):
owner = module.params.get('owner')
encoding = module.params.get('encoding')
volume = module.params.get('volume')
- is_uss = module.params.get('is_uss')
- is_pds = module.params.get('is_pds')
- is_src_dir = module.params.get('is_src_dir')
- is_mvs_dest = module.params.get('is_mvs_dest')
- temp_path = module.params.get('temp_path')
- src_member = module.params.get('src_member')
tmphlq = module.params.get('tmp_hlq')
force = module.params.get('force')
force_lock = module.params.get('force_lock')
+ content = module.params.get('content')
dest_data_set = module.params.get('dest_data_set')
if dest_data_set:
@@ -2644,6 +2664,13 @@ def run_module(module, arg_def):
dest_data_set["volumes"] = [volume]
copy_member = is_member(dest)
+ # In this section we initialize variables
+ # that were previously passed in from the action plugin.
+ is_src_dir = os.path.isdir(src)
+ is_uss = "/" in dest
+ is_mvs_dest = is_data_set(dest)
+ is_pds = is_src_dir and is_mvs_dest
+ src_member = is_member(src)
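As a worked example, how the recomputed flags resolve for two destinations (values inferred from the expressions above):

# dest = "/u/ibmuser/out.txt" -> is_uss=True,  is_mvs_dest=False, is_pds=False
# dest = "IBMUSER.TEST.PDS"   -> is_uss=False, is_mvs_dest=True,
#                                is_pds=True when src is a directory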
# ********************************************************************
# When copying to and from a data set member, 'dest' or 'src' will be
@@ -2690,18 +2717,17 @@ def run_module(module, arg_def):
# data sets with record format 'FBA' or 'VBA'.
src_has_asa_chars = dest_has_asa_chars = False
try:
- # If temp_path, the plugin has copied a file from the controller to USS.
- if temp_path or "/" in src:
+ if "/" in src:
src_ds_type = "USS"
- if remote_src and os.path.isdir(src):
+ if os.path.isdir(src):
is_src_dir = True
# When the destination is a dataset, we'll normalize the source
# file to UTF-8 for the record length computation as Python
# generally uses UTF-8 as the default encoding.
if not is_binary and not is_uss and not executable:
- new_src = temp_path or src
+ new_src = src
new_src = os.path.normpath(new_src)
# Normalizing encoding when src is a USS file (only).
encode_utils = encode.EncodeUtils()
@@ -2735,8 +2761,8 @@ def run_module(module, arg_def):
src_ds_type = data_set.DataSet.data_set_type(src_name)
if src_ds_type not in data_set.DataSet.MVS_VSAM:
- src_attributes = datasets.listing(src_name)[0]
- if src_attributes.recfm == 'FBA' or src_attributes.recfm == 'VBA':
+ src_attributes = datasets.list_datasets(src_name)[0]
+ if src_attributes.record_format == 'FBA' or src_attributes.record_format == 'VBA':
src_has_asa_chars = True
else:
raise NonExistentSourceError(src)
@@ -2758,9 +2784,8 @@ def run_module(module, arg_def):
if is_uss:
dest_ds_type = "USS"
if src_ds_type == "USS" and not is_src_dir and (dest.endswith("/") or os.path.isdir(dest)):
- src_basename = os.path.basename(src) if src else "inline_copy"
+ src_basename = os.path.basename(src) if not content else "inline_copy"
dest = os.path.normpath("{0}/{1}".format(dest, src_basename))
-
if dest.startswith("//"):
dest = dest.replace("//", "/")
@@ -2777,7 +2802,7 @@ def run_module(module, arg_def):
# dest_data_set.type overrides `dest_ds_type` given precedence rules
if dest_data_set and dest_data_set.get("type"):
- dest_ds_type = dest_data_set.get("type")
+ dest_ds_type = dest_data_set.get("type").upper()
elif executable:
""" When executable is selected and dest_exists is false means an executable PDSE was copied to remote,
so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY.
@@ -2785,22 +2810,13 @@ def run_module(module, arg_def):
and LIBRARY is not in MVS_PARTITIONED frozen set."""
dest_ds_type = "PDSE"
- if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'):
+ if dest_data_set and (dest_data_set.get('record_format', '') == 'fba' or dest_data_set.get('record_format', '') == 'vba'):
dest_has_asa_chars = True
elif not dest_exists and asa_text:
dest_has_asa_chars = True
elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM:
- dest_attributes = datasets.listing(dest_name)[0]
- if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA':
- dest_has_asa_chars = True
-
- if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'):
- dest_has_asa_chars = True
- elif not dest_exists and asa_text:
- dest_has_asa_chars = True
- elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM:
- dest_attributes = datasets.listing(dest_name)[0]
- if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA':
+ dest_attributes = datasets.list_datasets(dest_name)[0]
+ if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA':
dest_has_asa_chars = True
if dest_ds_type in data_set.DataSet.MVS_PARTITIONED:
@@ -2809,12 +2825,7 @@ def run_module(module, arg_def):
if copy_member:
dest_member_exists = dest_exists and data_set.DataSet.data_set_member_exists(dest)
elif src_ds_type == "USS":
- if temp_path:
- root_dir = "{0}/{1}".format(temp_path, os.path.basename(os.path.normpath(src)))
- root_dir = os.path.normpath(root_dir)
- else:
- root_dir = src
-
+ root_dir = src
dest_member_exists = dest_exists and data_set.DataSet.files_in_data_set_members(root_dir, dest)
elif src_ds_type in data_set.DataSet.MVS_PARTITIONED:
dest_member_exists = dest_exists and data_set.DataSet.data_set_shared_members(src, dest)
@@ -2955,17 +2966,13 @@ def run_module(module, arg_def):
# original one. This change applies only to the
# allocate_destination_data_set call.
if converted_src:
- if remote_src:
- original_src = src
- src = converted_src
- else:
- original_temp = temp_path
- temp_path = converted_src
+ original_src = src
+ src = converted_src
try:
if not is_uss:
res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set(
- temp_path or src,
+ src,
dest_name, src_ds_type,
dest_ds_type,
dest_exists,
@@ -2978,20 +2985,14 @@ def run_module(module, arg_def):
)
except Exception as err:
if converted_src:
- if remote_src:
- src = original_src
- else:
- temp_path = original_temp
+ src = original_src
module.fail_json(
msg="Unable to allocate destination data set: {0}".format(str(err)),
dest_exists=dest_exists
)
if converted_src:
- if remote_src:
- src = original_src
- else:
- temp_path = original_temp
+ src = original_src
# ********************************************************************
# Encoding conversion is only valid if the source is a local file,
@@ -3012,7 +3013,7 @@ def run_module(module, arg_def):
# if is_mvs_dest:
# encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET
- conv_path = copy_handler.convert_encoding(src, temp_path, encoding)
+ conv_path = copy_handler.convert_encoding(src, encoding, remote_src)
# ------------------------------- o -----------------------------------
# Copy to USS file or directory
@@ -3036,17 +3037,17 @@ def run_module(module, arg_def):
src,
dest,
conv_path,
- temp_path,
src_ds_type,
src_member,
member_name,
- force
+ force,
+ bool(content)
)
res_args['size'] = os.stat(dest).st_size
remote_checksum = dest_checksum = None
try:
- remote_checksum = get_file_checksum(temp_path or src)
+ remote_checksum = get_file_checksum(src)
dest_checksum = get_file_checksum(dest)
if validate:
@@ -3068,12 +3069,11 @@ def run_module(module, arg_def):
elif dest_ds_type in data_set.DataSet.MVS_SEQ:
# TODO: check how ASA behaves with this
if src_ds_type == "USS" and not is_binary:
- new_src = conv_path or temp_path or src
+ new_src = conv_path or src
conv_path = normalize_line_endings(new_src, encoding)
copy_handler.copy_to_seq(
src,
- temp_path,
conv_path,
dest,
src_ds_type
@@ -3085,8 +3085,6 @@ def run_module(module, arg_def):
# Copy to PDS/PDSE
# ---------------------------------------------------------------------
elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY":
- if not remote_src and not copy_member and os.path.isdir(temp_path):
- temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src)))
pdse_copy_handler = PDSECopyHandler(
module,
@@ -3100,7 +3098,6 @@ def run_module(module, arg_def):
pdse_copy_handler.copy_to_pdse(
src,
- temp_path,
conv_path,
dest_name,
src_ds_type,
@@ -3131,7 +3128,7 @@ def run_module(module, arg_def):
)
)
- return res_args, temp_path, conv_path
+ return res_args, conv_path
def main():
@@ -3153,7 +3150,7 @@ def main():
),
"to": dict(
type='str',
- required=True,
+ required=False,
)
}
),
@@ -3171,8 +3168,8 @@ def main():
options=dict(
type=dict(
type='str',
- choices=['BASIC', 'KSDS', 'ESDS', 'RRDS',
- 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER', 'LIBRARY'],
+ choices=['basic', 'ksds', 'esds', 'rrds',
+ 'lds', 'seq', 'pds', 'pdse', 'member', 'library'],
required=True,
),
space_primary=dict(
@@ -3181,12 +3178,12 @@ def main():
type='int', required=False),
space_type=dict(
type='str',
- choices=['K', 'M', 'G', 'CYL', 'TRK'],
+ choices=['k', 'm', 'g', 'cyl', 'trk'],
required=False,
),
record_format=dict(
type='str',
- choices=["FB", "VB", "FBA", "VBA", "U"],
+ choices=["fb", "vb", "fba", "vba", "u"],
required=False
),
record_length=dict(type='int', required=False),
@@ -3223,14 +3220,6 @@ def main():
auto_reload=dict(type='bool', default=False),
)
),
- is_uss=dict(type='bool'),
- is_pds=dict(type='bool'),
- is_src_dir=dict(type='bool'),
- is_mvs_dest=dict(type='bool'),
- size=dict(type='int'),
- temp_path=dict(type='str'),
- src_member=dict(type='bool'),
- local_charset=dict(type='str'),
force=dict(type='bool', default=False),
force_lock=dict(type='bool', default=False),
mode=dict(type='str', required=False),
@@ -3301,15 +3290,16 @@ def main():
)
if (
- not module.params.get("encoding")
+ not module.params.get("encoding").get("to")
and not module.params.get("remote_src")
and not module.params.get("is_binary")
and not module.params.get("executable")
):
- module.params["encoding"] = {
- "from": module.params.get("local_charset"),
- "to": encode.Defaults.get_default_system_charset(),
- }
+ module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset()
+ elif (
+ not module.params.get("encoding").get("to")
+ ):
+ module.params["encoding"] = None
if module.params.get("encoding"):
module.params.update(
@@ -3325,15 +3315,15 @@ def main():
)
)
- res_args = temp_path = conv_path = None
+ res_args = conv_path = None
try:
- res_args, temp_path, conv_path = run_module(module, arg_def)
+ res_args, conv_path = run_module(module, arg_def)
module.exit_json(**res_args)
except CopyOperationError as err:
cleanup([])
module.fail_json(**(err.json_args))
finally:
- cleanup([temp_path, conv_path])
+ cleanup([conv_path])
class EncodingConversionError(Exception):
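The encoding defaulting at the end of main() above, condensed into a standalone sketch; the charset literal stands in for encode.Defaults.get_default_system_charset():

def resolve_encoding(encoding, remote_src, is_binary, executable, default_charset="IBM-1047"):
    # A local text copy with no target charset converts to the system default;
    # any other copy without a target charset skips conversion entirely.
    if not encoding.get("to") and not (remote_src or is_binary or executable):
        encoding["to"] = default_charset
        return encoding
    if not encoding.get("to"):
        return None
    return encoding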
diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py
index 73af4acf1..4eae68733 100644
--- a/plugins/modules/zos_data_set.py
+++ b/plugins/modules/zos_data_set.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -24,14 +24,16 @@
description:
- Create, delete and set attributes of data sets.
- When forcing data set replacement, contents will not be preserved.
-author: "Blake Becker (@blakeinate)"
+author:
+ - "Blake Becker (@blakeinate)"
+ - "Rich Parker (@richp405)"
options:
name:
description:
- The name of the data set being managed. (e.g C(USER.TEST))
- If I(name) is not provided, a randomized data set name will be generated
with the HLQ matching the module-runners username.
- - Required if I(type=MEMBER) or I(state!=present) and not using I(batch).
+ - Required if I(type=member) or I(state!=present) and not using I(batch).
type: str
required: false
state:
@@ -44,7 +46,7 @@
If I(state=absent) and the data set does exist on the managed node,
remove the data set, module completes successfully with I(changed=True).
- >
- If I(state=absent) and I(type=MEMBER) and I(force=True), the data set
+ If I(state=absent) and I(type=member) and I(force=True), the data set
will be opened with I(DISP=SHR) such that the entire data set can be
accessed by other processes while the specified member is deleted.
- >
@@ -75,7 +77,7 @@
If I(state=present) and I(replace=False) and the data set is present
on the managed node, no action taken, module completes successfully with I(changed=False).
- >
- If I(state=present) and I(type=MEMBER) and the member does not exist in the data set,
+ If I(state=present) and I(type=member) and the member does not exist in the data set,
create a member formatted to store data, module completes successfully with I(changed=True).
Note, a PDSE does not allow a mixture of formats such that there is
executables (program objects) and data. The member created is formatted to store data,
@@ -107,26 +109,26 @@
- uncataloged
type:
description:
- - The data set type to be used when creating a data set. (e.g C(pdse))
- - C(MEMBER) expects to be used with an existing partitioned data set.
- - Choices are case-insensitive.
+ - The data set type to be used when creating a data set. (e.g. C(pdse)).
+ - C(member) expects to be used with an existing partitioned data set.
+ - Choices are case-sensitive.
required: false
type: str
choices:
- - KSDS
- - ESDS
- - RRDS
- - LDS
- - SEQ
- - PDS
- - PDSE
- - LIBRARY
- - BASIC
- - LARGE
- - MEMBER
- - HFS
- - ZFS
- default: PDS
+ - ksds
+ - esds
+ - rrds
+ - lds
+ - seq
+ - pds
+ - pdse
+ - library
+ - basic
+ - large
+ - member
+ - hfs
+ - zfs
+ default: pds
space_primary:
description:
- The amount of primary space to allocate for the dataset.
@@ -144,33 +146,35 @@
space_type:
description:
- The unit of measurement to use when defining primary and secondary space.
- - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK).
+ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk).
type: str
choices:
- - K
- - M
- - G
- - CYL
- - TRK
+ - k
+ - m
+ - g
+ - cyl
+ - trk
required: false
- default: M
+ default: m
record_format:
description:
- The format of the data set. (e.g C(FB))
- - Choices are case-insensitive.
- - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS)
+ - Choices are case-sensitive.
+ - When I(type=ksds), I(type=esds), I(type=rrds), I(type=lds) or I(type=zfs)
then I(record_format=None), as these types do not have a default
I(record_format).
required: false
choices:
- - FB
- - VB
- - FBA
- - VBA
- - U
- - F
+ - fb
+ - vb
+ - fba
+ - vba
+ - u
+ - f
type: str
- default: FB
+ default: fb
+ aliases:
+ - format
sms_storage_class:
description:
- The storage class for an SMS-managed dataset.
@@ -179,6 +183,8 @@
- Note that all non-linear VSAM datasets are SMS-managed.
type: str
required: false
+ aliases:
+ - data_class
sms_data_class:
description:
- The data class for an SMS-managed dataset.
@@ -215,15 +221,15 @@
key_offset:
description:
- The key offset to use when creating a KSDS data set.
- - I(key_offset) is required when I(type=KSDS).
- - I(key_offset) should only be provided when I(type=KSDS)
+ - I(key_offset) is required when I(type=ksds).
+ - I(key_offset) should only be provided when I(type=ksds)
type: int
required: false
key_length:
description:
- The key length to use when creating a KSDS data set.
- - I(key_length) is required when I(type=KSDS).
- - I(key_length) should only be provided when I(type=KSDS)
+ - I(key_length) is required when I(type=ksds).
+ - I(key_length) should only be provided when I(type=ksds)
type: int
required: false
volumes:
@@ -275,7 +281,7 @@
- The I(force=True) option enables sharing of data sets through the
disposition I(DISP=SHR).
- The I(force=True) only applies to data set members when I(state=absent)
- and I(type=MEMBER).
+ and I(type=member).
type: bool
required: false
default: false
@@ -291,7 +297,7 @@
- The name of the data set being managed. (e.g C(USER.TEST))
- If I(name) is not provided, a randomized data set name will be generated
with the HLQ matching the module-runners username.
- - Required if I(type=MEMBER) or I(state!=present)
+ - Required if I(type=member) or I(state!=present)
type: str
required: false
state:
@@ -304,7 +310,7 @@
If I(state=absent) and the data set does exist on the managed node,
remove the data set, module completes successfully with I(changed=True).
- >
- If I(state=absent) and I(type=MEMBER) and I(force=True), the data
+ If I(state=absent) and I(type=member) and I(force=True), the data
set will be opened with I(DISP=SHR) such that the entire data set
can be accessed by other processes while the specified member is
deleted.
@@ -336,7 +342,7 @@
If I(state=present) and I(replace=False) and the data set is present
on the managed node, no action taken, module completes successfully with I(changed=False).
- >
- If I(state=present) and I(type=MEMBER) and the member does not exist in the data set,
+ If I(state=present) and I(type=member) and the member does not exist in the data set,
create a member formatted to store data, module completes successfully with I(changed=True).
Note, a PDSE does not allow a mixture of formats such that there is
executables (program objects) and data. The member created is formatted to store data,
@@ -368,26 +374,26 @@
- uncataloged
type:
description:
- - The data set type to be used when creating a data set. (e.g C(PDSE))
- - C(MEMBER) expects to be used with an existing partitioned data set.
- - Choices are case-insensitive.
+ - The data set type to be used when creating a data set. (e.g. C(pdse))
+ - C(member) expects to be used with an existing partitioned data set.
+ - Choices are case-sensitive.
required: false
type: str
choices:
- - KSDS
- - ESDS
- - RRDS
- - LDS
- - SEQ
- - PDS
- - PDSE
- - LIBRARY
- - BASIC
- - LARGE
- - MEMBER
- - HFS
- - ZFS
- default: PDS
+ - ksds
+ - esds
+ - rrds
+ - lds
+ - seq
+ - pds
+ - pdse
+ - library
+ - basic
+ - large
+ - member
+ - hfs
+ - zfs
+ default: pds
space_primary:
description:
- The amount of primary space to allocate for the dataset.
@@ -405,33 +411,35 @@
space_type:
description:
- The unit of measurement to use when defining primary and secondary space.
- - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK).
+ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk).
type: str
choices:
- - K
- - M
- - G
- - CYL
- - TRK
+ - k
+ - m
+ - g
+ - cyl
+ - trk
required: false
- default: M
+ default: m
record_format:
description:
- The format of the data set. (e.g C(FB))
- - Choices are case-insensitive.
- - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or
- I(type=ZFS) then I(record_format=None), these types do not have a
+ - Choices are case-sensitive.
+ - When I(type=ksds), I(type=esds), I(type=rrds), I(type=lds) or
+ I(type=zfs) then I(record_format=None), as these types do not have a
default I(record_format).
required: false
choices:
- - FB
- - VB
- - FBA
- - VBA
- - U
- - F
+ - fb
+ - vb
+ - fba
+ - vba
+ - u
+ - f
type: str
- default: FB
+ default: fb
+ aliases:
+ - format
sms_storage_class:
description:
- The storage class for an SMS-managed dataset.
@@ -440,6 +448,8 @@
- Note that all non-linear VSAM datasets are SMS-managed.
type: str
required: false
+ aliases:
+ - data_class
sms_data_class:
description:
- The data class for an SMS-managed dataset.
@@ -476,15 +486,15 @@
key_offset:
description:
- The key offset to use when creating a KSDS data set.
- - I(key_offset) is required when I(type=KSDS).
- - I(key_offset) should only be provided when I(type=KSDS)
+ - I(key_offset) is required when I(type=ksds).
+ - I(key_offset) should only be provided when I(type=ksds)
type: int
required: false
key_length:
description:
- The key length to use when creating a KSDS data set.
- - I(key_length) is required when I(type=KSDS).
- - I(key_length) should only be provided when I(type=KSDS)
+ - I(key_length) is required when I(type=ksds).
+ - I(key_length) should only be provided when I(type=ksds)
type: int
required: false
volumes:
@@ -529,7 +539,7 @@
- The I(force=True) option enables sharing of data sets through the
disposition I(DISP=SHR).
- The I(force=True) only applies to data set members when
- I(state=absent) and I(type=MEMBER).
+ I(state=absent) and I(type=member).
type: bool
required: false
default: false
@@ -547,7 +557,7 @@
name: someds.name.here
type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: fba
record_length: 25
@@ -556,21 +566,21 @@
name: someds.name.here
type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: u
record_length: 25
- replace: yes
+ replace: true
- name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found.
zos_data_set:
name: someds.name.here
type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: u
record_length: 25
volumes: "222222"
- replace: yes
+ replace: true
- name: Create an ESDS data set if it does not exist
zos_data_set:
@@ -604,43 +614,43 @@
- name: Write a member to an existing PDS; replace if member exists
zos_data_set:
name: someds.name.here(mydata)
- type: MEMBER
- replace: yes
+ type: member
+ replace: true
- name: Write a member to an existing PDS; do not replace if member exists
zos_data_set:
name: someds.name.here(mydata)
- type: MEMBER
+ type: member
- name: Remove a member from an existing PDS
zos_data_set:
name: someds.name.here(mydata)
state: absent
- type: MEMBER
+ type: member
- name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR
zos_data_set:
name: someds.name.here(mydata)
state: absent
- type: MEMBER
- force: yes
+ type: member
+ force: true
- name: Create multiple partitioned data sets and add one or more members to each
zos_data_set:
batch:
- - name: someds.name.here1
- type: PDS
+ - name: someds.name.here1
+ type: pds
space_primary: 5
- space_type: M
+ space_type: m
record_format: fb
- replace: yes
+ replace: true
- name: someds.name.here1(member1)
- type: MEMBER
+ type: member
- name: someds.name.here2(member1)
- type: MEMBER
- replace: yes
+ type: member
+ replace: true
- name: someds.name.here2(member2)
- type: MEMBER
+ type: member
- name: Catalog a data set present on volume 222222 if it is uncataloged.
zos_data_set:
@@ -679,44 +689,44 @@
# CONSTANTS
DATA_SET_TYPES = [
- "KSDS",
- "ESDS",
- "RRDS",
- "LDS",
- "SEQ",
- "PDS",
- "PDSE",
- "BASIC",
- "LARGE",
- "LIBRARY",
- "MEMBER",
- "HFS",
- "ZFS",
+ "ksds",
+ "esds",
+ "rrds",
+ "lds",
+ "seq",
+ "pds",
+ "pdse",
+ "basic",
+ "large",
+ "library",
+ "member",
+ "hfs",
+ "zfs",
]
DATA_SET_FORMATS = [
- "FB",
- "VB",
- "FBA",
- "VBA",
- "U",
- "F",
+ "fb",
+ "vb",
+ "fba",
+ "vba",
+ "u",
+ "f",
]
DEFAULT_RECORD_LENGTHS = {
- "FB": 80,
- "FBA": 80,
- "VB": 137,
- "VBA": 137,
- "U": 0,
+ "fb": 80,
+ "fba": 80,
+ "vb": 137,
+ "vba": 137,
+ "u": 0,
}
DATA_SET_TYPES_VSAM = [
- "KSDS",
- "ESDS",
- "RRDS",
- "LDS",
- "ZFS",
+ "ksds",
+ "esds",
+ "rrds",
+ "lds",
+ "zfs",
]
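With the constants lowercased, lookups now key on lowercase values, for example:

DEFAULT_RECORD_LENGTHS["fb"]   # 80
DEFAULT_RECORD_LENGTHS["vb"]   # 137
"ksds" in DATA_SET_TYPES_VSAM  # True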
# ------------- Functions to validate arguments ------------- #
@@ -726,20 +736,27 @@ def get_individual_data_set_parameters(params):
"""Builds a list of data set parameters
to be used in future operations.
- Arguments:
- params {dict} -- The parameters from
+ Parameters
+ ----------
+ params : dict
+ The parameters from
Ansible's AnsibleModule object module.params.
- Raises:
- ValueError: Raised if top-level parameters "name"
- and "batch" are both provided.
- ValueError: Raised if neither top-level parameters "name"
- or "batch" are provided.
-
- Returns:
- [list] -- A list of dicts where each list item
+ Returns
+ -------
+ list[dict]
+ A list of dicts where each list item
represents one data set. Each dictionary holds the parameters
(passed to the zos_data_set module) for the data set which it represents.
+
+ Raises
+ ------
+ ValueError
+ Raised if top-level parameters "name"
+ and "batch" are both provided.
+ ValueError
+ Raised if neither top-level parameters "name"
+ or "batch" are provided.
"""
if params.get("name") and params.get("batch"):
raise ValueError(
@@ -759,20 +776,44 @@ def get_individual_data_set_parameters(params):
# * can be replaced by built-in
def data_set_name(contents, dependencies):
"""Validates provided data set name(s) are valid.
- Returns a list containing the name(s) of data sets."""
+ Returns a list containing the name(s) of data sets.
+
+ Parameters
+ ----------
+ contents : str
+ Name of the dataset.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the dependencies have a batch.
+ str
+ The data set name.
+
+ Raises
+ ------
+ ValueError
+ Data set name must be provided.
+ ValueError
+ Data set and member name must be provided.
+ ValueError
+ A value is invalid.
+ """
if dependencies.get("batch"):
return None
if contents is None:
if dependencies.get("state") != "present":
raise ValueError('Data set name must be provided when "state!=present"')
- if dependencies.get("type") != "MEMBER":
+ if dependencies.get("type") != "member":
tmphlq = dependencies.get("tmp_hlq")
if tmphlq is None:
tmphlq = ""
contents = DataSet.temp_name(tmphlq)
else:
raise ValueError(
- 'Data set and member name must be provided when "type=MEMBER"'
+ 'Data set and member name must be provided when "type=member"'
)
dsname = str(contents)
if not re.fullmatch(
@@ -786,7 +827,7 @@ def data_set_name(contents, dependencies):
dsname,
re.IGNORECASE,
)
- and dependencies.get("type") == "MEMBER"
+ and dependencies.get("type") == "member"
):
raise ValueError(
"Value {0} is invalid for data set argument.".format(dsname)
@@ -797,15 +838,33 @@ def data_set_name(contents, dependencies):
# * dependent on state
def space_type(contents, dependencies):
"""Validates provided data set unit of space is valid.
- Returns the unit of space."""
+ Returns the unit of space.
+
+ Parameters
+ ----------
+ contents : str
+ Unit of space of the dataset.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ str
+ The data set unit of space.
+
+ Raises
+ ------
+ ValueError
+ Value provided is invalid.
+"""
if dependencies.get("state") == "absent":
- return None
+ return "m"
if contents is None:
return None
- match = re.fullmatch(r"(M|G|K|TRK|CYL)", contents, re.IGNORECASE)
+ match = re.fullmatch(r"(m|g|k|trk|cyl)", contents, re.IGNORECASE)
if not match:
raise ValueError(
- 'Value {0} is invalid for space_type argument. Valid space types are "K", "M", "G", "TRK" or "CYL".'.format(
+ 'Value {0} is invalid for space_type argument. Valid space types are "k", "m", "g", "trk" or "cyl".'.format(
contents
)
)
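Illustrative calls against the validator above (results inferred from the shown branches; the success path is assumed to return the validated string):

space_type(None, {"state": "absent"})       # -> "m", the new default when absent
space_type("cyl", {"state": "present"})     # -> "cyl"
space_type("blocks", {"state": "present"})  # raises ValueError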
@@ -815,7 +874,27 @@ def space_type(contents, dependencies):
# * dependent on state
def sms_class(contents, dependencies):
"""Validates provided sms class is of valid length.
- Returns the sms class."""
+ Returns the sms class.
+
+ Parameters
+ ----------
+ contents : str
+ Name of the sms class.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the state is absent or contents is none.
+ str
+ The sms class set name.
+
+ Raises
+ ------
+ ValueError
+ Value is invalid.
+ """
if dependencies.get("state") == "absent" or contents is None:
return None
if len(contents) < 1 or len(contents) > 8:
@@ -830,7 +909,22 @@ def sms_class(contents, dependencies):
def valid_when_state_present(contents, dependencies):
"""Ensures no arguments that are invalid when state!=present
- are allowed."""
+ are allowed.
+
+ Parameters
+ ----------
+ contents : str
+ Arguments to be validated.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the state is absent or contents is none.
+ str
+ Valid arguments.
+ """
if dependencies.get("state") == "absent" or contents is None:
return None
return contents
@@ -840,7 +934,27 @@ def valid_when_state_present(contents, dependencies):
# * dependent on format
def record_length(contents, dependencies):
"""Validates provided record length is valid.
- Returns the record length as integer."""
+ Returns the record length as integer.
+
+ Parameters
+ ----------
+ contents : str
+ Length of the dataset.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the state is absent or contents is none.
+ str
+ The data set length.
+
+ Raises
+ ------
+ ValueError
+ Value is invalid.
+ """
if dependencies.get("state") == "absent":
return None
contents = (
@@ -863,29 +977,65 @@ def record_length(contents, dependencies):
# * dependent on record_length
def record_format(contents, dependencies):
"""Validates data set format is valid.
- Returns uppercase data set format."""
+ Returns the data set format in lowercase.
+
+ Parameters
+ ----------
+ contents : str
+ Format of the dataset.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ str
+ The data set format in lowercase. Default is 'fb'.
+
+ Raises
+ ------
+ ValueError
+ Value is invalid.
+ """
if dependencies.get("state") == "absent":
- return None
+ return "fb"
if contents is None:
- return None
+ return "fb"
formats = "|".join(DATA_SET_FORMATS)
if not re.fullmatch(formats, contents, re.IGNORECASE):
raise ValueError(
- "Value {0} is invalid for format argument. format must be of of the following: {1}.".format(
+ "Value {0} is invalid for format argument. format must be one of the following: {1}.".format(
contents, ", ".join(DATA_SET_FORMATS)
)
)
- return contents.upper()
+ return contents
# * dependent on state
def data_set_type(contents, dependencies):
"""Validates data set type is valid.
- Returns uppercase data set type."""
+ Returns the data set type in lowercase.
+
+ Parameters
+ ----------
+ contents : str
+ Type of the dataset.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ str
+ The data set type in lowercase. Default is 'pds'.
+
+ Raises
+ ------
+ ValueError
+ Value is invalid.
+ """
# if dependencies.get("state") == "absent" and contents != "MEMBER":
# return None
if contents is None:
- return "PDS"
+ return "pds"
types = "|".join(DATA_SET_TYPES)
if not re.fullmatch(types, contents, re.IGNORECASE):
raise ValueError(
@@ -893,13 +1043,35 @@ def data_set_type(contents, dependencies):
contents, ", ".join(DATA_SET_TYPES)
)
)
- return contents.upper()
+ return contents
# * dependent on state
def volumes(contents, dependencies):
"""Validates volume is valid.
- Returns uppercase volume."""
+ Returns uppercase volume.
+
+ Parameters
+ ----------
+ contents : str
+ Name of the volume.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the state is absent or contents is none.
+ str
+ The volume name.
+
+ Raises
+ ------
+ ValueError
+ Argument is invalid.
+ ValueError
+ Volume is required when state is cataloged.
+ """
if contents is None:
if dependencies.get("state") == "cataloged":
raise ValueError("Volume is required when state==cataloged.")
@@ -923,13 +1095,37 @@ def volumes(contents, dependencies):
# * dependent on type
def key_length(contents, dependencies):
"""Validates data set key length is valid.
- Returns data set key length as integer."""
+ Returns data set key length as integer.
+
+ Parameters
+ ----------
+ contents : str
+ key_length.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the state is absent or contents is none.
+ int
+ key_length.
+
+ Raises
+ ------
+ ValueError
+ Argument is invalid.
+ ValueError
+ key_length was not provided when requesting KSDS data set.
+ ValueError
+ key_length cannot be provided when type is not ksds.
+ """
if dependencies.get("state") == "absent":
return None
- if dependencies.get("type") == "KSDS" and contents is None:
+ if dependencies.get("type") == "ksds" and contents is None:
raise ValueError("key_length is required when requesting KSDS data set.")
- if dependencies.get("type") != "KSDS" and contents is not None:
- raise ValueError("key_length is only valid when type=KSDS.")
+ if dependencies.get("type") != "ksds" and contents is not None:
+ raise ValueError("key_length is only valid when type=ksds.")
if contents is None:
return None
contents = int(contents)
@@ -945,13 +1141,37 @@ def key_length(contents, dependencies):
# * dependent on key_length
def key_offset(contents, dependencies):
"""Validates data set key offset is valid.
- Returns data set key offset as integer."""
+ Returns data set key offset as integer.
+
+ Parameters
+ ----------
+ contents : str
+ Key offset of the data set.
+ dependencies : dict
+ Any dependencies needed for contents argument to be validated.
+
+ Returns
+ -------
+ None
+ If the state is absent or contents is none.
+ int
+ Key offset of the data set.
+
+ Raises
+ ------
+ ValueError
+ Argument is invalid.
+ ValueError
+ key_offset was not provided when requesting KSDS data set.
+ ValueError
+ key_offset cannot be provided when type is not ksds.
+ """
if dependencies.get("state") == "absent":
return None
- if dependencies.get("type") == "KSDS" and contents is None:
+ if dependencies.get("type") == "ksds" and contents is None:
raise ValueError("key_offset is required when requesting KSDS data set.")
- if dependencies.get("type") != "KSDS" and contents is not None:
- raise ValueError("key_offset is only valid when type=KSDS.")
+ if dependencies.get("type") != "ksds" and contents is not None:
+ raise ValueError("key_offset is only valid when type=ksds.")
if contents is None:
return None
contents = int(contents)
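Both validators above enforce the same ksds-only contract, for example:

key_length(4, {"state": "present", "type": "ksds"})     # -> 4
key_offset(0, {"state": "present", "type": "seq"})      # raises: only valid when type=ksds
key_offset(None, {"state": "present", "type": "ksds"})  # raises: required for a KSDS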
@@ -966,18 +1186,33 @@ def key_offset(contents, dependencies):
def perform_data_set_operations(name, state, **extra_args):
"""Calls functions to perform desired operations on
- one or more data sets. Returns boolean indicating if changes were made."""
+ one or more data sets. Returns boolean indicating if changes were made.
+
+ Parameters
+ ----------
+ name : str
+ Name of the dataset.
+ state : str
+ State of the data sets.
+ **extra_args : dict
+ Properties of the data sets.
+
+ Returns
+ -------
+ bool
+ If changes were made.
+ """
changed = False
# passing in **extra_args forced me to modify the acceptable parameters
# for multiple functions in data_set.py including ensure_present, replace
# and create where the force parameter has no bearing.
- if state == "present" and extra_args.get("type") != "MEMBER":
+ if state == "present" and extra_args.get("type") != "member":
changed = DataSet.ensure_present(name, **extra_args)
- elif state == "present" and extra_args.get("type") == "MEMBER":
+ elif state == "present" and extra_args.get("type") == "member":
changed = DataSet.ensure_member_present(name, extra_args.get("replace"))
- elif state == "absent" and extra_args.get("type") != "MEMBER":
+ elif state == "absent" and extra_args.get("type") != "member":
changed = DataSet.ensure_absent(name, extra_args.get("volumes"))
- elif state == "absent" and extra_args.get("type") == "MEMBER":
+ elif state == "absent" and extra_args.get("type") == "member":
changed = DataSet.ensure_member_absent(name, extra_args.get("force"))
elif state == "cataloged":
changed = DataSet.ensure_cataloged(name, extra_args.get("volumes"))
@@ -986,33 +1221,19 @@ def perform_data_set_operations(name, state, **extra_args):
return changed
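Condensed, the dispatch above maps state and member-ness onto the DataSet helpers:

# ("present", type != "member") -> DataSet.ensure_present(name, **extra_args)
# ("present", type == "member") -> DataSet.ensure_member_present(name, replace)
# ("absent",  type != "member") -> DataSet.ensure_absent(name, volumes)
# ("absent",  type == "member") -> DataSet.ensure_member_absent(name, force)
# ("cataloged", any type)       -> DataSet.ensure_cataloged(name, volumes)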
-def fix_old_size_arg(params):
- """ for backwards compatibility with old styled size argument """
- match = None
- if params.get("size"):
- match = re.fullmatch(
- r"([1-9][0-9]*)(M|G|K|TRK|CYL)", str(params.get("size")), re.IGNORECASE
- )
- if not match:
- raise ValueError(
- 'Value {0} is invalid for size argument. Valid size measurements are "K", "M", "G", "TRK" or "CYL".'.format(
- str(params.get("size"))
- )
- )
- if params.get("space_primary"):
- match = re.fullmatch(
- r"([1-9][0-9]*)(M|G|K|TRK|CYL)",
- str(params.get("space_primary")),
- re.IGNORECASE,
- )
- if match:
- params["space_primary"] = int(match.group(1))
- params["space_type"] = match.group(2)
- return params
+def parse_and_validate_args(params):
+ """Parse and validate args.
+ Parameters
+ ----------
+ params : dict
+ Params to be validated and parsed.
-def parse_and_validate_args(params):
- params = fix_old_size_arg(params)
+ Returns
+ -------
+ dict
+ Parsed args.
+ """
arg_defs = dict(
# Used for batch data set args
@@ -1030,9 +1251,18 @@ def parse_and_validate_args(params):
default="present",
choices=["present", "absent", "cataloged", "uncataloged"],
),
- type=dict(type=data_set_type, required=False, dependencies=["state"]),
+ type=dict(
+ type=data_set_type,
+ required=False,
+ dependencies=["state"],
+ choices=DATA_SET_TYPES,
+ ),
space_type=dict(
- type=space_type, required=False, dependencies=["state"]
+ type=space_type,
+ required=False,
+ dependencies=["state"],
+ choices=["k", "m", "g", "cyl", "trk"],
+ default="m",
),
space_primary=dict(type="int", required=False, dependencies=["state"]),
space_secondary=dict(
@@ -1042,7 +1272,9 @@ def parse_and_validate_args(params):
type=record_format,
required=False,
dependencies=["state"],
+ choices=["fb", "vb", "fba", "vba", "u", "f"],
aliases=["format"],
+ default="fb",
),
sms_management_class=dict(
type=sms_class, required=False, dependencies=["state"]
@@ -1114,14 +1346,22 @@ def parse_and_validate_args(params):
choices=["present", "absent", "cataloged", "uncataloged"],
),
type=dict(type=data_set_type, required=False, dependencies=["state"]),
- space_type=dict(type=space_type, required=False, dependencies=["state"]),
+ space_type=dict(
+ type=space_type,
+ required=False,
+ dependencies=["state"],
+ choices=["k", "m", "g", "cyl", "trk"],
+ default="m",
+ ),
space_primary=dict(type="int", required=False, dependencies=["state"]),
space_secondary=dict(type="int", required=False, dependencies=["state"]),
record_format=dict(
type=record_format,
required=False,
dependencies=["state"],
+ choices=["fb", "vb", "fba", "vba", "u", "f"],
aliases=["format"],
+ default="fb",
),
sms_management_class=dict(
type=sms_class, required=False, dependencies=["state"]
@@ -1179,7 +1419,7 @@ def parse_and_validate_args(params):
# ["batch", "space_type"],
# ["batch", "space_primary"],
# ["batch", "space_secondary"],
- ["batch", "record_format"],
+ # ["batch", "record_format"],
["batch", "sms_management_class"],
["batch", "sms_storage_class"],
["batch", "sms_data_class"],
@@ -1201,6 +1441,13 @@ def parse_and_validate_args(params):
def run_module():
+ """Runs the module.
+
+ Raises
+ ------
+ fail_json
+ Any exception during processing of data set params.
+ """
# TODO: add logic to handle aliases during parsing
module_args = dict(
@@ -1218,11 +1465,27 @@ def run_module():
default="present",
choices=["present", "absent", "cataloged", "uncataloged"],
),
- type=dict(type="str", required=False, default="PDS"),
- space_type=dict(type="str", required=False, default="M"),
- space_primary=dict(type="int", required=False, aliases=["size"], default=5),
+ type=dict(
+ type="str",
+ required=False,
+ default="pds",
+ choices=DATA_SET_TYPES,
+ ),
+ space_type=dict(
+ type="str",
+ required=False,
+ default="m",
+ choices=["k", "m", "g", "cyl", "trk"],
+ ),
+ space_primary=dict(type="int", required=False, default=5),
space_secondary=dict(type="int", required=False, default=3),
- record_format=dict(type="str", required=False, aliases=["format"], default="FB"),
+ record_format=dict(
+ type="str",
+ required=False,
+ aliases=["format"],
+ default="fb",
+ choices=["fb", "vb", "fba", "vba", "u", "f"],
+ ),
sms_management_class=dict(type="str", required=False),
# I know this alias is odd, ZOAU used to document they supported
# SMS data class when they were actually passing as storage class
@@ -1267,11 +1530,27 @@ def run_module():
default="present",
choices=["present", "absent", "cataloged", "uncataloged"],
),
- type=dict(type="str", required=False, default="PDS"),
- space_type=dict(type="str", required=False, default="M"),
- space_primary=dict(type="raw", required=False, aliases=["size"], default=5),
+ type=dict(
+ type="str",
+ required=False,
+ default="pds",
+ choices=DATA_SET_TYPES,
+ ),
+ space_type=dict(
+ type="str",
+ required=False,
+ default="m",
+ choices=["k", "m", "g", "cyl", "trk"],
+ ),
+ space_primary=dict(type="int", required=False, default=5),
space_secondary=dict(type="int", required=False, default=3),
- record_format=dict(type="str", required=False, aliases=["format"], default="FB"),
+ record_format=dict(
+ type="str",
+ required=False,
+ aliases=["format"],
+ choices=["fb", "vb", "fba", "vba", "u", "f"],
+ default="fb"
+ ),
sms_management_class=dict(type="str", required=False),
# I know this alias is odd, ZOAU used to document they supported
# SMS data class when they were actually passing as storage class
@@ -1319,9 +1598,10 @@ def run_module():
# This evaluation will always occur as a result of the limitation on the
# better arg parser, this will serve as a solution for now and ensure
# the non-batch and batch arguments are correctly set
+ # This section is duplicated below inside the `not module.check_mode` branch, so the same adjustments run again after the arg parser
if module.params.get("batch") is not None:
for entry in module.params.get("batch"):
- if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM:
+ if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM:
entry["record_format"] = None
if module.params.get("type") is not None:
module.params["type"] = None
@@ -1338,9 +1618,11 @@ def run_module():
if module.params.get("record_format") is not None:
module.params["record_format"] = None
elif module.params.get("type") is not None:
- if module.params.get("type").upper() in DATA_SET_TYPES_VSAM:
+ if module.params.get("type") in DATA_SET_TYPES_VSAM:
# For VSAM types set the value to nothing and let the code manage it
- module.params["record_format"] = None
+ # module.params["record_format"] = None
+ if module.params.get("record_format") is not None:
+ del module.params["record_format"]
if not module.check_mode:
try:
@@ -1353,6 +1635,30 @@ def run_module():
result["names"] = [d.get("name", "") for d in data_set_param_list]
for data_set_params in data_set_param_list:
+ # This *appears* redundant, but parse_and_validate_args reinforces the default value for record_format
+ if data_set_params.get("batch") is not None:
+ for entry in data_set_params.get("batch"):
+ if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM:
+ entry["record_format"] = None
+ if data_set_params.get("type") is not None:
+ data_set_params["type"] = None
+ if data_set_params.get("state") is not None:
+ data_set_params["state"] = None
+ if data_set_params.get("space_type") is not None:
+ data_set_params["space_type"] = None
+ if data_set_params.get("space_primary") is not None:
+ data_set_params["space_primary"] = None
+ if data_set_params.get("space_secondary") is not None:
+ data_set_params["space_secondary"] = None
+ if data_set_params.get("replace") is not None:
+ data_set_params["replace"] = None
+ if data_set_params.get("record_format") is not None:
+ data_set_params["record_format"] = None
+ else:
+ if data_set_params.get("type") in DATA_SET_TYPES_VSAM:
+ if data_set_params.get("record_format") is not None:
+ data_set_params["record_format"] = None
+
# remove unnecessary empty batch argument
result["changed"] = perform_data_set_operations(
**data_set_params
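# --- Editorial sketch (not part of the diff) ---------------------------------
# A minimal, self-contained illustration of the lowercase "ksds" validation
# pattern used by key_length() and key_offset() above: the argument is
# mandatory for KSDS data sets and forbidden for every other type. The
# function name is hypothetical; the real module wires this through
# BetterArgParser dependencies.

def validate_ksds_only_arg(name, value, ds_type, state):
    """Return the value as an int (or None); raise ValueError on misuse."""
    if state == "absent":
        return None
    if ds_type == "ksds" and value is None:
        raise ValueError("{0} is required when requesting KSDS data set.".format(name))
    if ds_type != "ksds" and value is not None:
        raise ValueError("{0} is only valid when type=ksds.".format(name))
    return int(value) if value is not None else None

# Example: validate_ksds_only_arg("key_offset", "4", "ksds", "present") -> 4
# ------------------------------------------------------------------------------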
diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py
index 2628ab174..e9afa4994 100644
--- a/plugins/modules/zos_encode.py
+++ b/plugins/modules/zos_encode.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -140,8 +140,8 @@
encoding:
from: IBM-1047
to: ISO8859-1
- backup: yes
- backup_compress: yes
+ backup: true
+ backup_compress: true
- name: Convert file encoding from IBM-1047 to ISO8859-1 to a directory
zos_encode:
@@ -249,7 +249,6 @@
encoding:
from: ISO8859-1
to: IBM-1047
-
"""
RETURN = r"""
@@ -273,7 +272,7 @@
sample: /path/file_name.2020-04-23-08-32-29-bak.tar
"""
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
better_arg_parser,
@@ -286,14 +285,34 @@
from os import makedirs
from os import listdir
import re
+import traceback
try:
from zoautil_py import datasets
except Exception:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
def check_pds_member(ds, mem):
+ """Check if a member exists in a PDS.
+
+ Parameters
+ ----------
+ ds : str
+ PDS data set name.
+ mem : str
+ Member name to check if is under PDS.
+
+ Returns
+ -------
+ bool
+ Whether it is a member of the data set.
+
+ Raises
+ ------
+ EncodeError
+ Cannot find the member in the provided data set.
+ """
check_rc = False
if mem in datasets.list_members(ds):
check_rc = True
@@ -303,7 +322,25 @@ def check_pds_member(ds, mem):
def check_mvs_dataset(ds):
- """ To call data_set utils to check if the MVS data set exists or not """
+ """To call data_set utils to check if the MVS data set exists or not.
+
+ Parameters
+ ----------
+ ds : str
+ Data set name.
+
+ Returns
+ -------
+ tuple(bool,str)
+ Whether the data set exists, and its type.
+
+ Raises
+ ------
+ EncodeError
+ If data set is not cataloged.
+ EncodeError
+ Unable to determine data set type.
+ """
check_rc = False
ds_type = None
if not data_set.DataSet.data_set_exists(ds):
@@ -320,7 +357,23 @@ def check_mvs_dataset(ds):
def check_file(file):
- """ check file is a USS file or an MVS data set """
+ """Check file is a USS file or an MVS data set.
+
+ Parameters
+ ----------
+ file : str
+ File to check.
+
+ Returns
+ -------
+ tuple(bool,bool,str)
+ Whether it is a USS file, whether it is an MVS data set, and the data set type.
+
+ Raises
+ ------
+ EncodeError
+ The data set is not partitioned.
+ """
is_uss = False
is_mvs = False
ds_type = None
@@ -346,6 +399,18 @@ def check_file(file):
def verify_uss_path_exists(file):
+ """Verify if USS path exists.
+
+ Parameters
+ ----------
+ file : str
+ Path of the file.
+
+ Raises
+ ------
+ EncodeError
+ File does not exist in the directory.
+ """
if not path.exists(file):
mypath = "/" + file.split("/")[0] + "/*"
ld = listdir(mypath)
@@ -358,6 +423,13 @@ def verify_uss_path_exists(file):
def run_module():
+ """Runs the module.
+
+ Raises
+ ------
+ fail_json
+ Exception during execution.
+ """
module_args = dict(
src=dict(type="str", required=True),
dest=dict(type="str"),
@@ -529,6 +601,18 @@ def run_module():
class EncodeError(Exception):
def __init__(self, message):
+ """Error during encoding.
+
+ Parameters
+ ----------
+ message : str
+ Human readable string describing the exception.
+
+ Attributes
+ ----------
+ msg : str
+ Human readable string describing the exception.
+ """
self.msg = 'An error occurred during encoding: "{0}"'.format(message)
super(EncodeError, self).__init__(self.msg)
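# --- Editorial sketch (not part of the diff) ---------------------------------
# The MissingZOAUImport -> ZOAUImportError migration above follows a deferred-
# import pattern: capture the traceback at import time and substitute a
# placeholder, so the module fails with full context only when the missing
# dependency is actually used. A stand-alone approximation (this placeholder
# class is illustrative, not the collection's exact implementation):

import traceback

class DeferredImportError:
    def __init__(self, tb):
        self.traceback = tb

    def __getattr__(self, name):
        raise ImportError("ZOAU is unavailable:\n" + self.traceback)

try:
    from zoautil_py import datasets
except Exception:
    datasets = DeferredImportError(traceback.format_exc())
# ------------------------------------------------------------------------------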
diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py
index edf0dc8e8..8b4d4809d 100644
--- a/plugins/modules/zos_fetch.py
+++ b/plugins/modules/zos_fetch.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019 - 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -272,7 +272,7 @@
import tempfile
import re
import os
-
+import traceback
from math import ceil
from shutil import rmtree
from ansible.module_utils.basic import AnsibleModule
@@ -286,16 +286,16 @@
validation,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
try:
- from zoautil_py import datasets, mvscmd, types
+ from zoautil_py import datasets, mvscmd, ztypes
except Exception:
- datasets = MissingZOAUImport()
- mvscmd = MissingZOAUImport()
- types = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
+ mvscmd = ZOAUImportError(traceback.format_exc())
+ ztypes = ZOAUImportError(traceback.format_exc())
class FetchHandler:
@@ -303,16 +303,50 @@ def __init__(self, module):
self.module = module
def _fail_json(self, **kwargs):
- """ Wrapper for AnsibleModule.fail_json """
+ """Wrapper for AnsibleModule.fail_json.
+
+ Parameters
+ ----------
+ **kwargs : dict
+ Arguments to pass to fail_json().
+ """
self.module.fail_json(**kwargs)
def _run_command(self, cmd, **kwargs):
- """ Wrapper for AnsibleModule.run_command """
+ """Wrapper for AnsibleModule.run_command.
+
+ Parameters
+ ----------
+ cmd : str
+ Command to run.
+ **kwargs : dict
+ Arguments to pass to run_command().
+
+ Returns
+ -------
+ tuple(int,str,str)
+ Return code, standard output and standard error.
+ """
return self.module.run_command(cmd, **kwargs)
def _get_vsam_size(self, vsam):
"""Invoke IDCAMS LISTCAT command to get the record length and space used.
Then estimate the space used by the VSAM data set.
+
+ Parameters
+ ----------
+ vsam : str
+ VSAM data set name.
+
+ Returns
+ -------
+ tuple(int,int,int)
+ Total size, max_recl and rec_total.
+
+ Raises
+ ------
+ fail_json
+ Unable to obtain data set information.
"""
space_pri = 0
total_size = 0
@@ -350,7 +384,27 @@ def _get_vsam_size(self, vsam):
return total_size, max_recl, rec_total
def _copy_vsam_to_temp_data_set(self, ds_name):
- """ Copy VSAM data set to a temporary sequential data set """
+ """Copy VSAM data set to a temporary sequential data set.
+
+ Parameters
+ ----------
+ ds_name : str
+ VSAM data set name to be copied into a temporary data set.
+
+ Returns
+ -------
+ str
+ Temporary data set name.
+
+ Raises
+ ------
+ fail_json
+ OS error.
+ fail_json
+ Command error while copying the data set.
+ fail_json
+ Failed to call IDCAMS.
+ """
mvs_rc = 0
vsam_size, max_recl, rec_total = self._get_vsam_size(ds_name)
# Default in case of max recl being 80 to avoid failures when fetching an empty vsam.
@@ -374,23 +428,23 @@ def _copy_vsam_to_temp_data_set(self, ds_name):
dd_statements = []
dd_statements.append(
- types.DDStatement(
- name="sysin", definition=types.DatasetDefinition(sysin)
+ ztypes.DDStatement(
+ name="sysin", definition=ztypes.DatasetDefinition(sysin)
)
)
dd_statements.append(
- types.DDStatement(
- name="input", definition=types.DatasetDefinition(ds_name)
+ ztypes.DDStatement(
+ name="input", definition=ztypes.DatasetDefinition(ds_name)
)
)
dd_statements.append(
- types.DDStatement(
- name="output", definition=types.DatasetDefinition(out_ds_name)
+ ztypes.DDStatement(
+ name="output", definition=ztypes.DatasetDefinition(out_ds_name)
)
)
dd_statements.append(
- types.DDStatement(
- name="sysprint", definition=types.FileDefinition(sysprint)
+ ztypes.DDStatement(
+ name="sysprint", definition=ztypes.FileDefinition(sysprint)
)
)
@@ -442,6 +496,25 @@ def _copy_vsam_to_temp_data_set(self, ds_name):
def _fetch_uss_file(self, src, is_binary, encoding=None):
"""Convert encoding of a USS file. Return a tuple of temporary file
name containing converted data.
+
+ Parameters
+ ----------
+ src : str
+ Source of the file.
+ is_binary : bool
+ Whether the fetch is in binary mode.
+ encoding : str
+ The file encoding.
+
+ Returns
+ -------
+ str
+ File name with the converted data.
+
+ Raises
+ ------
+ fail_json
+ Any exception that occurred while converting encoding.
"""
file_path = None
if (not is_binary) and encoding:
@@ -471,6 +544,25 @@ def _fetch_uss_file(self, src, is_binary, encoding=None):
def _fetch_vsam(self, src, is_binary, encoding=None):
"""Copy the contents of a VSAM to a sequential data set.
Afterwards, copy that data set to a USS file.
+
+ Parameters
+ ----------
+ src : str
+ Source of the file.
+ is_binary : bool
+ Whether the fetch is in binary mode.
+ encoding : str
+ The file encoding.
+
+ Returns
+ -------
+ str
+ USS File containing the encoded content of the input data set.
+
+ Raises
+ ------
+ fail_json
+ Unable to delete the temporary data set.
"""
temp_ds = self._copy_vsam_to_temp_data_set(src)
file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding)
@@ -487,6 +579,27 @@ def _fetch_pdse(self, src, is_binary, encoding=None):
"""Copy a partitioned data set to a USS directory. If the data set
is not being fetched in binary mode, encoding for all members inside
the data set will be converted.
+
+ Parameters
+ ----------
+ src : str
+ Source of the data set.
+ is_binary : bool
+ Whether the fetch is in binary mode.
+ encoding : str
+ The file encoding.
+
+ Returns
+ -------
+ str
+ Directory path containing the files of the converted data set members.
+
+ Raises
+ ------
+ fail_json
+ Error copying the partitioned data set to USS.
+ fail_json
+ Error converting encoding of the member.
"""
dir_path = tempfile.mkdtemp()
cmd = "cp -B \"//'{0}'\" {1}"
@@ -531,7 +644,28 @@ def _fetch_pdse(self, src, is_binary, encoding=None):
def _fetch_mvs_data(self, src, is_binary, encoding=None):
"""Copy a sequential data set or a partitioned data set member
- to a USS file
+ to a USS file.
+
+ Parameters
+ ----------
+ src : str
+ Source of the data set.
+ is_binary : bool
+ If is binary.
+ encoding : str
+ The file encoding.
+
+ Returns
+ -------
+ str
+ USS File containing the encoded content of the input data set.
+
+ Raises
+ ------
+ fail_json
+ Unable to copy to USS.
+ fail_json
+ Error converting encoding of the data set.
"""
fd, file_path = tempfile.mkstemp()
os.close(fd)
@@ -571,6 +705,23 @@ def _fetch_mvs_data(self, src, is_binary, encoding=None):
def run_module():
+ """Runs the module.
+
+ Raises
+ ------
+ fail_json
+ When parameter verification fails.
+ fail_json
+ When the source does not exist or is uncataloged.
+ fail_json
+ When the data set type cannot be determined.
+ fail_json
+ While gathering data set information.
+ fail_json
+ When the data set member was not found inside the data set.
+ fail_json
+ When the file does not have appropriate read permissions.
+ """
# ********************************************************** #
# Module initialization #
# ********************************************************** #
@@ -585,14 +736,13 @@ def run_module():
validate_checksum=dict(required=False, default=True, type="bool"),
encoding=dict(required=False, type="dict"),
ignore_sftp_stderr=dict(type="bool", default=False, required=False),
- local_charset=dict(type="str"),
tmp_hlq=dict(required=False, type="str", default=None),
)
)
src = module.params.get("src")
if module.params.get("use_qualifier"):
- module.params["src"] = datasets.hlq() + "." + src
+ module.params["src"] = datasets.get_hlq() + "." + src
# ********************************************************** #
# Verify paramater validity #
@@ -607,7 +757,7 @@ def run_module():
tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None),
)
- if not module.params.get("encoding") and not module.params.get("is_binary"):
+ if not module.params.get("encoding").get("from") and not module.params.get("is_binary"):
mvs_src = data_set.is_data_set(src)
remote_charset = encode.Defaults.get_default_system_charset()
@@ -615,10 +765,13 @@ def run_module():
"from": encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET
if mvs_src
else remote_charset,
- "to": module.params.get("local_charset"),
+ "to": module.params.get("encoding").get("to"),
}
- if module.params.get("encoding"):
+ # We check encoding 'from' and 'to' because if the user pass both arguments of encoding,
+ # we honor those but encoding 'to' is an argument that the code obtain any time.
+ # Encoding will not be null and will generate problems as encoding 'from' could came empty.
+ if module.params.get("encoding").get("from") and module.params.get("encoding").get("to"):
module.params.update(
dict(
from_encoding=module.params.get("encoding").get("from"),
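# --- Editorial sketch (not part of the diff) ---------------------------------
# The types -> ztypes rename above changes how DD statements are built for
# mvscmd. A condensed sketch of the pattern from _copy_vsam_to_temp_data_set(),
# assuming ZOAU 1.3.x is installed and the named data sets already exist:

from zoautil_py import ztypes

def build_repro_dds(sysin, in_ds, out_ds, sysprint_path):
    """Return the DD statements an IDCAMS REPRO invocation needs."""
    return [
        ztypes.DDStatement(name="sysin", definition=ztypes.DatasetDefinition(sysin)),
        ztypes.DDStatement(name="input", definition=ztypes.DatasetDefinition(in_ds)),
        ztypes.DDStatement(name="output", definition=ztypes.DatasetDefinition(out_ds)),
        ztypes.DDStatement(name="sysprint", definition=ztypes.FileDefinition(sysprint_path)),
    ]
# ------------------------------------------------------------------------------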
diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py
index b49d65f04..b269c472d 100644
--- a/plugins/modules/zos_find.py
+++ b/plugins/modules/zos_find.py
@@ -31,6 +31,7 @@
author:
- "Asif Mahmud (@asifmahmud)"
- "Demetrios Dimatos (@ddimatos)"
+ - "Fernando Flores (@fernandofloresg)"
options:
age:
description:
@@ -276,18 +277,28 @@
def content_filter(module, patterns, content):
""" Find data sets that match any pattern in a list of patterns and
- contains the given content
-
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used in the module
- patterns {list[str]} -- A list of data set patterns
- content {str} -- The content string to search for within matched data sets
-
- Returns:
- dict[ps=set, pds=dict[str, str], searched=int] -- A dictionary containing
+ contains the given content.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used in the module.
+ patterns : list[str]
+ A list of data set patterns.
+ content : str
+ The content string to search for within matched data sets.
+
+ Returns
+ -------
+ dict[ps=set, pds=dict[str, str], searched=int]
+ A dictionary containing
a set of matched "PS" data sets, a dictionary containing "PDS" data sets
and members corresponding to each PDS, an int representing number of total
data sets examined.
+
+ Raises
+ ------
+ fail_json
+ Non-zero return code received while executing ZOAU shell command 'dgrep'.
"""
filtered_data_sets = dict(ps=set(), pds=dict(), searched=0)
for pattern in patterns:
@@ -320,15 +331,25 @@ def content_filter(module, patterns, content):
def data_set_filter(module, pds_paths, patterns):
""" Find data sets that match any pattern in a list of patterns.
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used
- patterns {list[str]} -- A list of data set patterns
-
- Returns:
- dict[ps=set, pds=dict[str, str], searched=int] -- A dictionary containing
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used.
+ patterns : list[str]
+ A list of data set patterns.
+
+ Returns
+ -------
+ dict[ps=set, pds=dict[str, str], searched=int]
+ A dictionary containing
a set of matched "PS" data sets, a dictionary containing "PDS" data sets
and members corresponding to each PDS, an int representing number of total
data sets examined.
+
+ Raises
+ ------
+ fail_json
+ Non-zero return code received while executing ZOAU shell command 'dls'.
"""
filtered_data_sets = dict(ps=set(), pds=dict(), searched=0)
patterns = pds_paths or patterns
@@ -371,15 +392,21 @@ def pds_filter(module, pds_dict, member_patterns, excludes=None):
""" Return all PDS/PDSE data sets whose members match any of the patterns
in the given list of member patterns.
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used in the module
- pds_dict {dict[str, str]} -- A dictionary where each key is the name of
- of the PDS/PDSE and the value is a list of
- members belonging to the PDS/PDSE
- member_patterns {list} -- A list of member patterns to search for
-
- Returns:
- dict[str, set[str]] -- Filtered PDS/PDSE with corresponding members
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used in the module.
+ pds_dict : dict[str, str]
+ A dictionary where each key is the name of
+ the PDS/PDSE and the value is a list of
+ members belonging to the PDS/PDSE.
+ member_patterns : list
+ A list of member patterns to search for.
+
+ Returns
+ -------
+ dict[str, set[str]]
+ Filtered PDS/PDSE with corresponding members.
"""
filtered_pds = dict()
for pds, members in pds_dict.items():
@@ -411,12 +438,22 @@ def vsam_filter(module, patterns, resource_type, age=None):
""" Return all VSAM data sets that match any of the patterns
in the given list of patterns.
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used
- patterns {list[str]} -- A list of data set patterns
-
- Returns:
- set[str]-- Matched VSAM data sets
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used.
+ patterns : list[str]
+ A list of data set patterns.
+
+ Returns
+ -------
+ set[str]
+ Matched VSAM data sets.
+
+ Raises
+ ------
+ fail_json
+ Non-zero return code received while executing ZOAU shell command 'vls'.
"""
filtered_data_sets = set()
now = time.time()
@@ -446,14 +483,26 @@ def data_set_attribute_filter(
):
""" Filter data sets based on attributes such as age or size.
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used
- data_sets {set[str]} -- A set of data set names
- size {int} -- The size, in bytes, that should be used to filter data sets
- age {int} -- The age, in days, that should be used to filter data sets
-
- Returns:
- set[str] -- Matched data sets filtered by age and size
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used.
+ data_sets : set[str]
+ A set of data set names.
+ size : int
+ The size, in bytes, that should be used to filter data sets.
+ age : int
+ The age, in days, that should be used to filter data sets.
+
+ Returns
+ -------
+ set[str]
+ Matched data sets filtered by age and size.
+
+ Raises
+ ------
+ fail_json
+ Non-zero return code received while executing ZOAU shell command 'dls'.
"""
filtered_data_sets = set()
now = time.time()
@@ -479,7 +528,7 @@ def data_set_attribute_filter(
age and not size and _age_filter(ds_age, now, age)
) or
(
- size and not age and _size_filter(int(out[5]), size)
+ size and not age and _size_filter(int(out[6]), size)
)
):
filtered_data_sets.add(ds)
@@ -493,13 +542,24 @@ def volume_filter(module, data_sets, volumes):
"""Return only the data sets that are allocated in one of the volumes from
the list of input volumes.
- Arguments:
- module {AnsibleModule} -- The Ansible module object
- data_sets {set[str]} -- A set of data sets to be filtered
- volumes {list[str]} -- A list of input volumes
-
- Returns:
- set[str] -- The filtered data sets
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object.
+ data_sets : set[str]
+ A set of data sets to be filtered.
+ volumes : list[str]
+ A list of input volumes.
+
+ Returns
+ -------
+ set[str]
+ The filtered data sets.
+
+ Raises
+ ------
+ fail_json
+ Unable to retrieve VTOC information.
"""
filtered_data_sets = set()
for volume in volumes:
@@ -517,15 +577,21 @@ def volume_filter(module, data_sets, volumes):
def exclude_data_sets(module, data_set_list, excludes):
- """Remove data sets that match any pattern in a list of patterns
-
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used
- data_set_list {set[str]} -- A set of data sets to be filtered
- excludes {list[str]} -- A list of data set patterns to be excluded
-
- Returns:
- set[str] -- The remaining data sets that have not been excluded
+ """Remove data sets that match any pattern in a list of patterns.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used.
+ data_set_list : set[str]
+ A set of data sets to be filtered.
+ excludes : list[str]
+ A list of data set patterns to be excluded.
+
+ Returns
+ -------
+ set[str]
+ The remaining data sets that have not been excluded.
"""
for ds in set(data_set_list):
for ex_pat in excludes:
@@ -536,15 +602,21 @@ def exclude_data_sets(module, data_set_list, excludes):
def _age_filter(ds_date, now, age):
- """Determine whether a given date is older than 'age'
-
- Arguments:
- ds_date {str} -- The input date in the format YYYY/MM/DD
- now {float} -- The time elapsed since the last epoch
- age {int} -- The age, in days, to compare against
-
- Returns:
- bool -- Whether 'ds_date' is older than 'age'
+ """Determine whether a given date is older than 'age'.
+
+ Parameters
+ ----------
+ ds_date : str
+ The input date in the format YYYY/MM/DD.
+ now : float
+ The time elapsed since the last epoch.
+ age : int
+ The age, in days, to compare against.
+
+ Returns
+ -------
+ bool
+ Whether 'ds_date' is older than 'age'.
"""
year, month, day = list(map(int, ds_date.split("/")))
if year == "0000":
@@ -560,14 +632,24 @@ def _age_filter(ds_date, now, age):
def _get_creation_date(module, ds):
- """Retrieve the creation date for a given data set
-
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used
- ds {str} -- The name of the data set
-
- Returns:
- str -- The data set creation date in the format "YYYY/MM/DD"
+ """Retrieve the creation date for a given data set.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used.
+ ds : str
+ The name of the data set.
+
+ Returns
+ -------
+ str
+ The data set creation date in the format "YYYY/MM/DD".
+
+ Raises
+ ------
+ fail_json
+ Non-zero return code received while retrieving data set age.
"""
rc, out, err = mvs_cmd.idcams(
" LISTCAT ENT('{0}') HISTORY".format(ds), authorized=True
@@ -595,14 +677,19 @@ def _get_creation_date(module, ds):
def _size_filter(ds_size, size):
- """ Determine whether a given size is greater than the input size
-
- Arguments:
- ds_size {int} -- The input size, in bytes
- size {int} -- The size, in bytes, to compare against
-
- Returns:
- bool -- Whether 'ds_size' is greater than 'age'
+ """Determine whether a given size is greater than the input size.
+
+ Parameters
+ ----------
+ ds_size : int
+ The input size, in bytes.
+ size : int
+ The size, in bytes, to compare against.
+
+ Returns
+ -------
+ bool
+ Whether 'ds_size' is greater than 'size'.
"""
if size >= 0 and ds_size >= abs(size):
return True
@@ -612,15 +699,26 @@ def _size_filter(ds_size, size):
def _match_regex(module, pattern, string):
- """ Determine whether the input regex pattern matches the string
-
- Arguments:
- module {AnsibleModule} -- The Ansible module object being used
- pattern {str} -- The regular expression to match
- string {str} -- The string to match
-
- Returns:
- re.Match -- A Match object that matches the pattern to string
+ """Determine whether the input regex pattern matches the string.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ The Ansible module object being used.
+ pattern : str
+ The regular expression to match.
+ string : str
+ The string to match.
+
+ Returns
+ -------
+ re.Match
+ A Match object that matches the pattern to string.
+
+ Raises
+ ------
+ fail_json
+ Invalid regular expression.
"""
try:
return fullmatch(pattern, string, re.IGNORECASE)
@@ -639,7 +737,28 @@ def _dgrep_wrapper(
verbose=False,
context=None
):
- """A wrapper for ZOAU 'dgrep' shell command"""
+ """A wrapper for ZOAU 'dgrep' shell command.
+
+ Parameters
+ ----------
+ data_set_pattern : str
+ Data set pattern in which to search for content.
+ content : str
+ Content to search across the data sets specified in data_set_pattern.
+ ignore_case : bool
+ Whether to ignore case or not.
+ line_num : bool
+ Whether to display line numbers.
+ verbose : bool
+ Extra verbosity; prints the names of the data sets being searched.
+ context : int
+ If context lines are requested, up to that many lines before and after the matching line are also printed.
+
+ Returns
+ -------
+ tuple(int,str,str)
+ Return code, standard output and standard error.
+ """
dgrep_cmd = "dgrep"
if ignore_case:
dgrep_cmd += " -i"
@@ -662,7 +781,28 @@ def _dls_wrapper(
verbose=False,
migrated=False
):
- """A wrapper for ZOAU 'dls' shell command"""
+ """A wrapper for ZOAU 'dls' shell command.
+
+ Parameters
+ ----------
+ data_set_pattern : str
+ Data set pattern.
+ list_details : bool
+ Display detailed information based on the data set type.
+ u_time : bool
+ Display last usage time.
+ size : bool
+ Display size in list.
+ verbose : bool
+ Display verbose information.
+ migrated : bool
+ Display migrated data sets.
+
+ Returns
+ -------
+ tuple(int,str,str)
+ Return code, standard output and standard error.
+ """
dls_cmd = "dls"
if migrated:
dls_cmd += " -m"
@@ -681,7 +821,22 @@ def _dls_wrapper(
def _vls_wrapper(pattern, details=False, verbose=False):
- """A wrapper for ZOAU 'vls' shell command"""
+ """A wrapper for ZOAU 'vls' shell command.
+
+ Parameters
+ ----------
+ pattern : str
+ Data set pattern.
+ details : bool
+ Display detailed information based on the data set type.
+ verbose : bool
+ Display verbose information.
+
+ Returns
+ -------
+ tuple(int,str,str)
+ Return code, standard output and standard error.
+ """
vls_cmd = "vls"
if details:
vls_cmd += " -l"
@@ -693,6 +848,20 @@ def _vls_wrapper(pattern, details=False, verbose=False):
def _match_resource_type(type1, type2):
+ """Compare that the two types match.
+
+ Parameters
+ ----------
+ type1 : str
+ One of the types that are expected to match.
+ type2 : str
+ One of the types that are expected to match.
+
+ Returns
+ -------
+ bool
+ If the types match.
+ """
if type1 == type2:
return True
if type1 == "CLUSTER" and type2 not in ("DATA", "INDEX"):
@@ -701,13 +870,17 @@ def _match_resource_type(type1, type2):
def _ds_type(ds_name):
- """Utility function to determine the DSORG of a data set
+ """Utility function to determine the DSORG of a data set.
- Arguments:
- ds_name {str} -- The name of the data set
+ Parameters
+ ----------
+ ds_name : str
+ The name of the data set.
- Returns:
- str -- The DSORG of the data set
+ Returns
+ -------
+ str
+ The DSORG of the data set.
"""
rc, out, err = mvs_cmd.ikjeft01(
" LISTDS '{0}'".format(ds_name),
@@ -720,6 +893,25 @@ def _ds_type(ds_name):
def run_module(module):
+ """Initialize parameters.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ Ansible Module.
+
+ Returns
+ -------
+ dict
+ Result arguments.
+
+ Raises
+ ------
+ fail_json
+ Failed to process age.
+ fail_json
+ Failed to process size.
+ """
# Parameter initialization
age = module.params.get('age')
age_stamp = module.params.get('age_stamp')
@@ -816,6 +1008,13 @@ def run_module(module):
def main():
+ """Initialize module when it's run as main.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ """
module = AnsibleModule(
argument_spec=dict(
age=dict(type="str", required=False),
diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py
index b7aeb7ee4..eb7699cdb 100644
--- a/plugins/modules/zos_gather_facts.py
+++ b/plugins/modules/zos_gather_facts.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2022, 2023
+# Copyright (c) IBM Corporation 2022, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -108,30 +108,45 @@
"""
from fnmatch import fnmatch
-import json
+import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
zoau_version_checker
)
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
+ ZOAUImportError,
+)
+
+try:
+ from zoautil_py import zsystem
+except ImportError:
+ zsystem = ZOAUImportError(traceback.format_exc())
+
-def zinfo_cmd_string_builder(gather_subset):
- """Builds a command string for 'zinfo' based off the gather_subset list.
- Arguments:
- gather_subset {list} -- A list of subsets to pass in.
- Returns:
- [str] -- A string that contains a command line argument for calling
- zinfo with the appropriate options.
- [None] -- An invalid value was received for the subsets.
+def zinfo_facts_list_builder(gather_subset):
+ """Builds a list of strings to pass into 'zinfo' based off the
+ gather_subset list.
+
+ Parameters
+ ----------
+ gather_subset : list
+ A list of subsets to pass in.
+
+ Returns
+ -------
+ list[str]
+ A list of strings that contains sanitized subsets.
+ None
+ An invalid value was received for the subsets.
"""
if gather_subset is None or 'all' in gather_subset:
- return "zinfo -j -a"
+ return ["all"]
# base value
- zinfo_arg_string = "zinfo -j"
+ subsets_list = []
- # build full string
for subset in gather_subset:
# remove leading/trailing spaces
subset = subset.strip()
@@ -141,19 +156,25 @@ def zinfo_cmd_string_builder(gather_subset):
# sanitize subset against malicious (probably alphanumeric only?)
if not subset.isalnum():
return None
- zinfo_arg_string += " -t " + subset
+ subsets_list.append(subset)
- return zinfo_arg_string
+ return subsets_list
def flatten_zinfo_json(zinfo_dict):
"""Removes one layer of mapping in the dictionary. Top-level keys
correspond to zinfo subsets and are removed.
- Arguments:
- zinfo_dict {dict} -- A dictionary that contains the parsed result from
- the zinfo json string.
- Returns:
- [dict] -- A flattened dictionary.
+
+ Parameters
+ ----------
+ zinfo_dict : dict
+ A dictionary that contains the parsed result from
+ the zinfo json string.
+
+ Returns
+ -------
+ dict
+ A flattened dictionary.
"""
d = {}
for subset in list(zinfo_dict):
@@ -164,13 +185,20 @@ def flatten_zinfo_json(zinfo_dict):
def apply_filter(zinfo_dict, filter_list):
"""Returns a dictionary that contains only the keys which fit the specified
filters.
- Arguments:
- zinfo_dict {dict} -- A flattened dictionary that contains results from
- zinfo.
- filter_list {list} -- A string list of shell wildcard patterns (i.e.
- 'filters') to apply to the zinfo_dict keys.
- Returns:
- [dict] -- A dictionary with keys that are filtered out.
+
+ Parameters
+ ----------
+ zinfo_dict : dict
+ A flattened dictionary that contains results from
+ zinfo.
+ filter_list : list
+ A string list of shell wildcard patterns (i.e.
+ 'filters') to apply to the zinfo_dict keys.
+
+ Returns
+ -------
+ dict
+ A dictionary with keys that are filtered out.
"""
if filter_list is None or filter_list == [] or '*' in filter_list:
@@ -185,6 +213,19 @@ def apply_filter(zinfo_dict, filter_list):
def run_module():
+ """Initialize module.
+
+ Raises
+ ------
+ fail_json
+ The zos_gather_facts module requires ZOAU >= 1.3.0.
+ fail_json
+ An invalid subset was passed to Ansible.
+ fail_json
+ An invalid subset was detected.
+ fail_json
+ An exception has occurred. Unable to gather facts.
+ """
# define available arguments/parameters a user can pass to the module
module_args = dict(
gather_subset=dict(
@@ -214,59 +255,36 @@ def run_module():
if module.check_mode:
module.exit_json(**result)
- if not zoau_version_checker.is_zoau_version_higher_than("1.2.1"):
+ if not zoau_version_checker.is_zoau_version_higher_than("1.3.0"):
module.fail_json(
- ("The zos_gather_facts module requires ZOAU >= 1.2.1. Please "
+ ("The zos_gather_facts module requires ZOAU >= 1.3.0. Please "
"upgrade the ZOAU version on the target node.")
)
gather_subset = module.params['gather_subset']
- # build out zinfo command with correct options
+ # build out list of strings to pass to zinfo python api.
# call this whether or not gather_subsets list is empty/valid/etc
- # rely on the function to report back errors. Note the function only
+ # rely on the helper function to report back errors. Note the function only
# returns None if there's malicious or improperly formatted subsets.
- # Invalid subsets are caught when the actual zinfo command is run.
- cmd = zinfo_cmd_string_builder(gather_subset)
- if not cmd:
+ # Invalid subsets are caught when the actual zinfo function is run.
+ facts_list = zinfo_facts_list_builder(gather_subset)
+ if not facts_list:
module.fail_json(msg="An invalid subset was passed to Ansible.")
- rc, fcinfo_out, err = module.run_command(cmd, encoding=None)
-
- decode_str = fcinfo_out.decode('utf-8')
-
- # We DO NOT return a partial list. Instead we FAIL FAST since we are
- # targeting automation -- quiet but well-intended error messages may easily
- # be skipped
- if rc != 0:
- # there are 3 known error messages in zinfo, if neither gets
- # triggered then we send out this generic zinfo error message.
- err_msg = ('An exception has occurred in Z Open Automation Utilities '
- '(ZOAU) utility \'zinfo\'. See \'zinfo_err_msg\' for '
- 'additional details.')
- # triggered by invalid optarg eg "zinfo -q"
- if 'BGYSC5201E' in err.decode('utf-8'):
- err_msg = ('Invalid call to zinfo. See \'zinfo_err_msg\' for '
- 'additional details.')
- # triggered when optarg does not get expected arg eg "zinfo -t"
- elif 'BGYSC5202E' in err.decode('utf-8'):
- err_msg = ('Invalid call to zinfo. Possibly missing a valid subset'
- ' See \'zinfo_err_msg\' for additional details.')
- # triggered by illegal subset eg "zinfo -t abc"
- elif 'BGYSC5203E' in err.decode('utf-8'):
- err_msg = ('An invalid subset was detected. See \'zinfo_err_msg\' '
- 'for additional details.')
-
- module.fail_json(msg=err_msg, zinfo_err_msg=err)
-
zinfo_dict = {} # to track parsed zinfo facts.
try:
- zinfo_dict = json.loads(decode_str)
- except json.JSONDecodeError:
- # tell user something else for this error? This error is thrown when
- # Python doesn't like the json string it parsed from zinfo.
- module.fail_json(msg="Unsupported JSON format for the output.")
+ zinfo_dict = zsystem.zinfo(json=True, facts=facts_list)
+ except ValueError:
+ err_msg = 'An invalid subset was detected.'
+ module.fail_json(msg=err_msg)
+ except Exception as e:
+ err_msg = (
+ 'An exception has occurred. Unable to gather facts. '
+ 'See stderr for more details.'
+ )
+ module.fail_json(msg=err_msg, stderr=str(e))
# remove zinfo subsets from parsed zinfo result, flatten by one level
flattened_d = flatten_zinfo_json(zinfo_dict)
diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py
index 40c7d61d0..21e0af3e6 100644
--- a/plugins/modules/zos_job_output.py
+++ b/plugins/modules/zos_job_output.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -414,15 +414,36 @@
from ansible.module_utils.basic import AnsibleModule
+import traceback
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
+ ZOAUImportError,
+)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import (
job_output,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
better_arg_parser
)
+try:
+ from zoautil_py import exceptions as zoau_exceptions
+except Exception:
+ zoau_exceptions = ZOAUImportError(traceback.format_exc())
def run_module():
+ """Initialize module.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ job_id or job_name or owner not provided.
+ fail_json
+ ZOAU exception.
+ fail_json
+ Any exception while fetching jobs.
+ """
module_args = dict(
job_id=dict(type="str", required=False),
job_name=dict(type="str", required=False),
@@ -461,6 +482,13 @@ def run_module():
results = {}
results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=ddname)
results["changed"] = False
+ except zoau_exceptions.JobFetchException as fetch_exception:
+ module.fail_json(
+ msg="ZOAU exception",
+ rc=fetch_exception.response.rc,
+ stdout=fetch_exception.response.stdout_response,
+ stderr=fetch_exception.response.stderr_response,
+ )
except Exception as e:
module.fail_json(msg=repr(e))
diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py
index aaa72d9ab..be2bb513f 100644
--- a/plugins/modules/zos_job_query.py
+++ b/plugins/modules/zos_job_query.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -266,7 +266,15 @@
def run_module():
-
+ """Initialize the module.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ Any exception while getting job params.
+ """
module_args = dict(
job_name=dict(type="str", required=False, default="*"),
owner=dict(type="str", required=False),
@@ -313,7 +321,27 @@ def run_module():
def query_jobs(job_name, job_id, owner):
-
+ """Returns jobs that coincide with the given arguments.
+
+ Parameters
+ ----------
+ job_name : str
+ Name of the jobs.
+ job_id : str
+ Id of the jobs.
+ owner : str
+ Owner of the jobs.
+
+ Returns
+ -------
+ list[str]
+ List with the jobs.
+
+ Raises
+ ------
+ RuntimeError
+ No job was found.
+ """
jobs = []
if job_id:
jobs = job_status(job_id=job_id)
@@ -327,6 +355,18 @@ def query_jobs(job_name, job_id, owner):
def parsing_jobs(jobs_raw):
+ """Parse job into an understandable format.
+
+ Parameters
+ ----------
+ jobs_raw : dict
+ Raw jobs.
+
+ Returns
+ -------
+ dict
+ Parsed jobs.
+ """
jobs = []
ret_code = {}
for job in jobs_raw:
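# --- Editorial sketch (not part of the diff) ---------------------------------
# zos_job_output and zos_job_query now surface ZOAU's structured job
# exceptions instead of a bare repr(). The pattern in isolation, assuming a
# populated AnsibleModule instance named `module` (the attribute names come
# from the zos_job_output hunk above):

from zoautil_py import exceptions as zoau_exceptions

def fetch_or_fail(module, fetch):
    try:
        return fetch()
    except zoau_exceptions.JobFetchException as exc:
        module.fail_json(
            msg="ZOAU exception",
            rc=exc.response.rc,
            stdout=exc.response.stdout_response,
            stderr=exc.response.stderr_response,
        )
# ------------------------------------------------------------------------------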
diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py
index ee5faa0ac..bb3aac1ab 100644
--- a/plugins/modules/zos_job_submit.py
+++ b/plugins/modules/zos_job_submit.py
@@ -42,27 +42,17 @@
(e.g "/User/tester/ansible-playbook/sample.jcl")
location:
required: false
- default: DATA_SET
+ default: data_set
type: str
choices:
- - DATA_SET
- - USS
- - LOCAL
+ - data_set
+ - uss
+ - local
description:
- - The JCL location. Supported choices are ``DATA_SET``, ``USS`` or ``LOCAL``.
- - DATA_SET can be a PDS, PDSE, or sequential data set.
- - USS means the JCL location is located in UNIX System Services (USS).
- - LOCAL means locally to the ansible control node.
- wait:
- required: false
- default: false
- type: bool
- description:
- - Setting this option will yield no change, it is disabled. There is no
- need to set I(wait); setting I(wait_times_s) is the correct way to
- configure the amount of time to wait for a job to execute.
- - This option will be removed in ibm.ibm_zos_core collection version 1.10.0
- - See option I(wait_time_s).
+ - The JCL location. Supported choices are C(data_set), C(uss) or C(local).
+ - C(data_set) can be a PDS, PDSE, or sequential data set.
+ - C(uss) means the JCL location is located in UNIX System Services (USS).
+ - C(local) means locally to the ansible control node.
wait_time_s:
required: false
default: 10
@@ -90,17 +80,17 @@
required: false
type: str
description:
- - The volume serial (VOLSER)is where the data set resides. The option
+ - The volume serial (VOLSER) is where the data set resides. The option
is required only when the data set is not cataloged on the system.
- When configured, the L(zos_job_submit,./zos_job_submit.html) will try to
catalog the data set for the volume serial. If it is not able to, the
module will fail.
- - Ignored for I(location=USS) and I(location=LOCAL).
+ - Ignored for I(location=uss) and I(location=local).
encoding:
description:
- Specifies which encoding the local JCL file should be converted from
and to, before submitting the job.
- - This option is only supported for when I(location=LOCAL).
+ - This option is only supported for when I(location=local).
- If this parameter is not provided, and the z/OS systems default encoding
can not be identified, the JCL file will be converted from UTF-8 to
IBM-1047 by default, otherwise the module will detect the z/OS system
@@ -244,16 +234,20 @@
- Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error.
- Job status `SYS` indicates a system failure.
- Job status `?` indicates status can not be determined.
+ - Jobs where status cannot be determined will result in None (NULL).
type: str
sample: AC
msg_code:
description:
- The return code from the submitted job as a string.
+ - Jobs which have no return code will result in None (NULL), such
+ is the case of a job that errors or is active.
type: str
sample: 0000
msg_txt:
description:
- Returns additional information related to the submitted job.
+ - Returns additional information related to the submitted job.
+ - Jobs which have no additional information will result in None (NULL).
type: str
sample: The job JOB00551 was run with special job processing TYPRUN=SCAN.
This will result in no completion, return code or job steps and
@@ -261,8 +255,8 @@
code:
description:
- The return code converted to an integer value when available.
- - Jobs which have no return code will return NULL, such is the case
- of a job that errors or is active.
+ - Jobs which have no return code will result in None (NULL), such
+ is the case of a job that errors or is active.
type: int
sample: 0
steps:
@@ -561,30 +555,25 @@
"system": "STL1"
}
]
-message:
- description: This option is being deprecated
- returned: success
- type: str
- sample: Submit JCL operation succeeded.
"""
EXAMPLES = r"""
-- name: Submit JCL in a PDSE member
+- name: Submit JCL in a PDSE member.
zos_job_submit:
src: HLQ.DATA.LLQ(SAMPLE)
- location: DATA_SET
+ location: data_set
register: response
- name: Submit JCL in USS with no DDs in the output.
zos_job_submit:
src: /u/tester/demo/sample.jcl
- location: USS
+ location: uss
return_output: false
- name: Convert local JCL to IBM-037 and submit the job.
zos_job_submit:
src: /Users/maxy/ansible-playbooks/provision/sample.jcl
- location: LOCAL
+ location: local
encoding:
from: ISO8859-1
to: IBM-037
@@ -592,25 +581,25 @@
- name: Submit JCL in an uncataloged PDSE on volume P2SS01.
zos_job_submit:
src: HLQ.DATA.LLQ(SAMPLE)
- location: DATA_SET
+ location: data_set
volume: P2SS01
- name: Submit a long running PDS job and wait up to 30 seconds for completion.
zos_job_submit:
src: HLQ.DATA.LLQ(LONGRUN)
- location: DATA_SET
+ location: data_set
wait_time_s: 30
- name: Submit a long running PDS job and wait up to 30 seconds for completion.
zos_job_submit:
src: HLQ.DATA.LLQ(LONGRUN)
- location: DATA_SET
+ location: data_set
wait_time_s: 30
- name: Submit JCL and set the max return code the module should fail on to 16.
zos_job_submit:
src: HLQ.DATA.LLQ
- location: DATA_SET
+ location: data_set
max_rc: 16
"""
@@ -621,7 +610,7 @@
BetterArgParser,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import (
- job_output, search_dictionaries, JOB_ERROR_STATUS
+ job_output, search_dictionaries, JOB_ERROR_STATUSES
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
ZOAUImportError,
@@ -633,6 +622,7 @@
DataSet,
)
from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
from timeit import default_timer as timer
from os import remove
import traceback
@@ -640,9 +630,9 @@
import re
try:
- from zoautil_py import exceptions
+ from zoautil_py import exceptions as zoau_exceptions
except ImportError:
- exceptions = ZOAUImportError(traceback.format_exc())
+ zoau_exceptions = ZOAUImportError(traceback.format_exc())
try:
from zoautil_py import jobs
@@ -650,40 +640,64 @@
jobs = ZOAUImportError(traceback.format_exc())
-JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"])
+JOB_STATUSES = list(dict.fromkeys(JOB_ERROR_STATUSES))
+JOB_STATUSES.append("CC")
+
JOB_SPECIAL_PROCESSING = frozenset(["TYPRUN"])
MAX_WAIT_TIME_S = 86400
-def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, start_time=timer()):
- """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set.
-
- Arguments:
- module - module instnace to access the module api
- src (str) - JCL, can be relative or absolute paths either on controller or USS
- - Data set, can be PS, PDS, PDSE Member
- src_name (str) - the src name that was provided in the module because through
- the runtime src could be replace with a temporary file name
- timeout (int) - how long to wait in seconds for a job to complete
- hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all
- JCL local to a controller is transfered to USS thus would be
- True
- volume (str) - volume the data set JCL is located on that will be cataloged before
- being submitted
- start_time - time the JCL started its submission
-
- Returns:
- job_submitted_id - the JCL job ID returned from submitting a job, else if no
- job submits, None will be returned
- duration - how long the job ran for in this method
+def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=None, start_time=timer()):
+ """Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ Module instance used to access the module API.
+ src : str
+ JCL location; can be a relative or absolute path on the controller
+ or in USS, or a data set (PS, PDS, or PDSE member).
+ src_name : str
+ The src name that was provided to the module, since at runtime
+ src may be replaced with a temporary file name.
+ timeout : int
+ How long to wait in seconds for a job to complete.
+ is_unix : bool
+ True if the JCL is a file in USS, otherwise False. Note that all
+ JCL local to a controller is transferred to USS, so it would be
+ True.
+ volume : str
+ Volume the data set JCL is located on; it will be cataloged before
+ being submitted.
+ start_time : int
+ Time the JCL started its submission.
+
+ Returns
+ -------
+ str
+ The JCL job ID returned from submitting a job; if no job
+ submits, None will be returned.
+ int
+ How long the job ran for in this method.
+
+ Raises
+ ------
+ fail_json
+ Unable to submit job because the data set could not be cataloged on the volume.
+ fail_json
+ Unable to submit job, the job submission has failed.
+ fail_json
+ The JCL has been submitted but there was an error while fetching its status.
+ fail_json
+ The job has been submitted and no job id was returned.
"""
kwargs = {
- "timeout": timeout,
- "hfs": hfs,
+ # Since every fetch retry waits for a second before continuing,
+ # we can just pass the timeout (also in seconds) to this arg.
+ "fetch_max_retries": timeout,
}
- wait = True # Wait is always true because the module requires wait_time_s > 0
present = False
duration = 0
job_submitted = None
@@ -704,9 +718,9 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None,
"not be cataloged on the volume {1}.".format(src, volume))
module.fail_json(**result)
- job_submitted = jobs.submit(src, wait, None, **kwargs)
+ job_submitted = jobs.submit(src, is_unix=is_unix, **kwargs)
- # Introducing a sleep to ensure we have the result of job sumbit carrying the job id
+ # Introducing a sleep to ensure we have the result of job submit carrying the job id.
while (job_submitted is None and duration <= timeout):
current_time = timer()
duration = round(current_time - start_time)
@@ -716,69 +730,87 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None,
# which is what ZOAU sends back, optionally we can check the 'status' as
# that is sent back as `AC` when the job is not complete but the problem
# with monitoring 'AC' is that STARTED tasks never exit the AC status.
+ job_fetched = None
+ job_fetch_rc = None
+ job_fetch_status = None
+
if job_submitted:
- job_listing_rc = jobs.listing(job_submitted.id)[0].rc
- job_listing_status = jobs.listing(job_submitted.id)[0].status
+ try:
+ job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0]
+ job_fetch_rc = job_fetched.return_code
+ job_fetch_status = job_fetched.status
+ except zoau_exceptions.JobFetchException:
+ pass
# Before moving forward lets ensure our job has completed but if we see
- # status that matches one in JOB_ERROR_STATUS, don't wait, let the code
+ # status that matches one in JOB_STATUSES, don't wait, let the code
# drop through and get analyzed in the main as it will scan the job output
- # Any match to JOB_ERROR_STATUS ends our processing and wait times
- while (job_listing_status not in JOB_ERROR_STATUS and
- job_listing_status == 'AC' and
- ((job_listing_rc is None or len(job_listing_rc) == 0 or
- job_listing_rc == '?') and duration < timeout)):
+ # Any match to JOB_STATUSES ends our processing and wait times
+ while (job_fetch_status not in JOB_STATUSES and
+ job_fetch_status == 'AC' and
+ ((job_fetch_rc is None or len(job_fetch_rc) == 0 or
+ job_fetch_rc == '?') and duration < timeout)):
current_time = timer()
duration = round(current_time - start_time)
sleep(1)
- job_listing_rc = jobs.listing(job_submitted.id)[0].rc
- job_listing_status = jobs.listing(job_submitted.id)[0].status
+ try:
+ job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0]
+ job_fetch_rc = job_fetched.return_code
+ job_fetch_status = job_fetched.status
+ # Allow jobs that need more time to be fetched to use the full wait_time_s
+ except zoau_exceptions.JobFetchException as err:
+ if duration >= timeout:
+ raise err
+ else:
+ continue
- # ZOAU throws a ZOAUException when the job sumbission fails thus there is no
+ # ZOAU throws a JobSubmitException when the job submission fails, thus there is no
# JCL RC to share with the user, if there is a RC, that will be processed
# in the job_output parser.
- except exceptions.ZOAUException as err:
+ except zoau_exceptions.JobSubmitException as err:
result["changed"] = False
result["failed"] = True
- result["stderr"] = str(err)
+ result["stderr"] = to_text(err)
result["duration"] = duration
- result["job_id"] = job_submitted.id if job_submitted else None
+ result["job_id"] = job_submitted.job_id if job_submitted else None
result["msg"] = ("Unable to submit job {0}, the job submission has failed. "
"Without the job id, the error can not be determined. "
"Consider using module `zos_job_query` to poll for the "
"job by name or review the system log for purged jobs "
- "resulting from an abend.".format(src_name))
+ "resulting from an abend. Standard error may have "
+ "additional information.".format(src_name))
module.fail_json(**result)
- # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id
- # has been returned within the allocated time.
- except exceptions.JobSubmitException as err:
+ # ZOAU throws a JobFetchException when it is unable to fetch a job.
+ # This could happen while trying to fetch a job still running.
+ except zoau_exceptions.JobFetchException as err:
result["changed"] = False
result["failed"] = False
- result["stderr"] = str(err)
+ result["stderr"] = to_text(err)
result["duration"] = duration
- result["job_id"] = job_submitted.id if job_submitted else None
- result["msg"] = ("The JCL has been submitted {0} and no job id was returned "
- "within the allocated time of {1} seconds. Consider using "
- " module zos_job_query to poll for a long running "
- "jobs or increasing the value for "
- "`wait_times_s`.".format(src_name, str(timeout)))
+ result["job_id"] = job_submitted.job_id
+ _msg_detail = "the job with status {0}".format(job_fetch_status) if job_fetch_status else "its status"
+ result["msg"] = ("The JCL has been submitted {0} with ID {1} but there was an "
+ "error while fetching {2} within the allocated time of {3} "
+ "seconds. Consider using module zos_job_query to poll for the "
+ "job for more information. Standard error may have additional "
+ "information.".format(src_name, job_submitted.job_id, _msg_detail, str(timeout)))
module.fail_json(**result)
- # Between getting a job_submitted and the jobs.listing(job_submitted.id)[0].rc
+ # Between getting a job_submitted and the jobs.fetch_multiple(job_submitted.job_id)[0].return_code
# is enough time for the system to purge an invalid job, so catch it and let
# it fall through to the catchall.
except IndexError:
job_submitted = None
# There appears to be a small fraction of time when ZOAU has a handle on the
- # job and and suddenly its purged, this check is to ensure the job is there
+ # job and suddenly its purged, this check is to ensure the job is there
# long after the purge else we throw an error here if its been purged.
if job_submitted is None:
result["changed"] = False
result["failed"] = True
result["duration"] = duration
- result["job_id"] = job_submitted.id if job_submitted else None
+ result["job_id"] = job_submitted.job_id if job_submitted else None
result["msg"] = ("The job {0} has been submitted and no job id was returned "
"within the allocated time of {1} seconds. Without the "
"job id, the error can not be determined, consider using "
@@ -787,19 +819,25 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None,
"abend.".format(src_name, str(timeout)))
module.fail_json(**result)
- return job_submitted.id if job_submitted else None, duration
+ return job_submitted.job_id if job_submitted else None, duration
def run_module():
+ """Initialize module.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ The value for option 'wait_time_s' is not valid.
+ """
module_args = dict(
src=dict(type="str", required=True),
- wait=dict(type="bool", required=False, default=False,
- removed_at_date='2022-11-30',
- removed_from_collection='ibm.ibm_zos_core'),
location=dict(
type="str",
- default="DATA_SET",
- choices=["DATA_SET", "USS", "LOCAL"],
+ default="data_set",
+ choices=["data_set", "uss", "local"],
),
encoding=dict(
type="dict",
@@ -866,12 +904,10 @@ def run_module():
arg_defs = dict(
src=dict(arg_type="data_set_or_path", required=True),
- wait=dict(arg_type="bool", required=False, removed_at_date='2022-11-30',
- removed_from_collection='ibm.ibm_zos_core'),
location=dict(
arg_type="str",
- default="DATA_SET",
- choices=["DATA_SET", "USS", "LOCAL"],
+ default="data_set",
+ choices=["data_set", "uss", "local"],
),
from_encoding=dict(
arg_type="encoding", default=Defaults.DEFAULT_ASCII_CHARSET, required=False),
@@ -898,12 +934,11 @@ def run_module():
# Extract values from set module options
location = parsed_args.get("location")
volume = parsed_args.get("volume")
- parsed_args.get("wait")
src = parsed_args.get("src")
return_output = parsed_args.get("return_output")
wait_time_s = parsed_args.get("wait_time_s")
max_rc = parsed_args.get("max_rc")
- temp_file = parsed_args.get("src") if location == "LOCAL" else None
+ temp_file = parsed_args.get("src") if location == "local" else None
# Default 'changed' is False in case the module is not able to execute
result = dict(changed=False)
@@ -917,15 +952,15 @@ def run_module():
job_submitted_id = None
duration = 0
start_time = timer()
- if location == "DATA_SET":
+ if location == "data_set":
job_submitted_id, duration = submit_src_jcl(
- module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time)
- elif location == "USS":
+ module, src, src_name=src, timeout=wait_time_s, is_unix=False, volume=volume, start_time=start_time)
+ elif location == "uss":
job_submitted_id, duration = submit_src_jcl(
- module, src, src_name=src, timeout=wait_time_s, hfs=True)
- elif location == "LOCAL":
+ module, src, src_name=src, timeout=wait_time_s, is_unix=True)
+ elif location == "local":
job_submitted_id, duration = submit_src_jcl(
- module, src, src_name=src, timeout=wait_time_s, hfs=True)
+ module, src, src_name=src, timeout=wait_time_s, is_unix=True)
# Explicitly pass None for the unused args else a default of '*' will be
# used and return undesirable results
@@ -959,7 +994,7 @@ def run_module():
result["jobs"] = job_output_txt
job_ret_code = job_output_txt[0].get("ret_code")
job_ret_code.update({"msg_txt": _msg_suffix})
- result["msg"] = _msg + _msg_suffix
+ result["msg"] = _msg
module.exit_json(**result)
# Job has submitted, the module changed the managed node
@@ -970,31 +1005,25 @@ def run_module():
job_ret_code = job_output_txt[0].get("ret_code")
if job_ret_code:
- job_msg = job_ret_code.get("msg")
- job_code = job_ret_code.get("code")
-
- # ret_code["msg"] should never be empty where a ret_code["code"] can be None,
- # for example, ret_code["msg"] could be populated with an ABEND which has
- # no corresponding ret_code["code"], if empty something is severe.
- if job_msg is None:
- _msg = ("Unable to find a 'msg' in the 'ret_code' dictionary, "
- "please review the job log.")
- result["stderr"] = _msg
- raise Exception(_msg)
+ job_ret_code_msg = job_ret_code.get("msg")
+ job_ret_code_code = job_ret_code.get("code")
+ job_ret_code_msg_code = job_ret_code.get("msg_code")
if return_output is True and max_rc is not None:
- is_changed = assert_valid_return_code(max_rc, job_code, job_ret_code)
-
- if re.search("^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)), job_msg):
- # If the job_msg doesn't have a CC, it is an improper completion (error/abend)
- if re.search("^(?:CC)", job_msg) is None:
- _msg = ("The job completion code (CC) was not in the job log. "
- "Please review the error {0} and the job log.".format(job_msg))
- result["stderr"] = _msg
- raise Exception(_msg)
-
- if job_code is None:
- # If there is no job_code (Job return code) it may NOT be an error,
+ is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, result)
+
+ if job_ret_code_msg is not None:
+ if re.search("^(?:{0})".format("|".join(JOB_STATUSES)), job_ret_code_msg):
+ # If the job_ret_code_msg doesn't have a CC (completion code), the job failed.
+ if re.search("^(?:CC)", job_ret_code_msg) is None:
+ _msg = ("The job completion code (CC) was not in the job log. "
+ "please review the job log for status {0}.".format(job_ret_code_msg))
+ result["stderr"] = _msg
+ job_ret_code.update({"msg_txt": _msg})
+ raise Exception(_msg)
+
+ if job_ret_code_code is None:
+ # If there is no job_ret_code_code (Job return code) it may NOT be an error,
# some jobs will never have an RC, e.g. jobs with TYPRUN=*,
# Started tasks (which are not supported), so further analyze the
# JESJCL DD to figure out if it's a TYPRUN job
@@ -1005,11 +1034,15 @@ def run_module():
# Its possible jobs don't have a JESJCL which are active and this would
# cause an index out of range error.
if not jes_jcl_dd:
- raise Exception("The job return code was not available in the job log, "
- "please review the job log and status {0}.".format(job_msg))
+ _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "."
+ _msg = ("The job return code was not available in the job log, "
+ "please review the job log{0}".format(_msg_detail))
+ job_ret_code.update({"msg_txt": _msg})
+ raise Exception(_msg)
jes_jcl_dd_content = jes_jcl_dd[0].get("content")
jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content)
+
# The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in.
special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)"
.format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str)
@@ -1024,12 +1057,24 @@ def run_module():
.format(job_submitted_id, special_processing_keyword[0])})
is_changed = False
else:
- raise Exception("The job return code was not available in the job log, "
- "please review the job log and error {0}.".format(job_msg))
-
- elif job_code != 0 and max_rc is None:
- raise Exception("The job return code {0} was non-zero in the "
- "job output, this job has failed.".format(str(job_code)))
+ # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated
+ # so check both and provide a proper response.
+
+ if job_ret_code_msg_code is None:
+ _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "."
+ _msg = ("The job return code was not available in the job log, "
+ "please review the job log{0}".format(_msg_detail))
+ job_ret_code.update({"msg_txt": _msg})
+ raise Exception(_msg)
+
+ elif job_ret_code_code != 0 and max_rc is None:
+ _msg = ("The job return code {0} was non-zero in the "
+ "job output, this job has failed.".format(str(job_ret_code_code)))
+ job_ret_code.update({"msg_txt": _msg})
+ result["stderr"] = _msg
+ raise Exception(_msg)
if not return_output:
for job in result.get("jobs", []):
@@ -1044,18 +1089,13 @@ def run_module():
result["stderr"] = _msg
result["jobs"] = None
raise Exception(_msg)
-
except Exception as err:
result["failed"] = True
result["changed"] = False
result["msg"] = ("The JCL submitted with job id {0} but "
"there was an error, please review "
"the error for further details: {1}".format
- (str(job_submitted_id), str(err)))
- if job_output_txt:
- job_ret_code = job_output_txt[0].get("ret_code")
- if job_ret_code:
- job_ret_code.update({"msg_txt": str(err)})
+ (str(job_submitted_id), to_text(err)))
module.exit_json(**result)
finally:
@@ -1068,33 +1108,68 @@ def run_module():
module.exit_json(**result)
-def assert_valid_return_code(max_rc, job_rc, ret_code):
+def assert_valid_return_code(max_rc, job_rc, ret_code, result):
+ """Asserts valid return code.
+
+ Parameters
+ ----------
+ max_rc : int
+ Max return code.
+ job_rc : int
+ Job return code.
+ ret_code : dict
+ Return code dictionary from the job output, including the steps.
+ result : dict
+ Result dictionary, updated with error details on failure.
+
+ Returns
+ -------
+ bool
+ True when the job should be reported as changed; False for a tolerated non-zero return code.
+
+ Raises
+ ------
+ Exception
+ The job return code was not available in the jobs output.
+ Exception
+ The job return code for the submitted job is greater than the value set for option 'max_rc'.
+ Exception
+ The step return code for the submitted job is greater than the value set for option 'max_rc'.
+ """
+
if job_rc is None:
raise Exception(
"The job return code (ret_code[code]) was not available in the jobs output, "
"this job has failed.")
if job_rc > max_rc:
- raise Exception("The job return code, 'ret_code[code]' {0} for the submitted job is "
- "greater than the value set for option 'max_rc' {1}. "
- "Increase the value for 'max_rc' otherwise this job submission "
- "has failed.".format(str(job_rc), str(max_rc)))
+ _msg = ("The job return code, 'ret_code[code]' {0} for the submitted job is "
+ "greater than the value set for option 'max_rc' {1}. "
+ "Increase the value for 'max_rc' otherwise this job submission "
+ "has failed.".format(str(job_rc), str(max_rc)))
+ ret_code.update({"msg_txt": _msg})
+ result["stderr"] = _msg
+ raise Exception(_msg)
for step in ret_code["steps"]:
step_cc_rc = int(step["step_cc"])
step_name_for_rc = step["step_name"]
if step_cc_rc > max_rc:
- raise Exception("The step name {0} with return code {1} for the submitted job is "
- "greater than the value set for option 'max_rc' {2}. "
- "Increase the value for 'max_rc' otherwise this job submission "
- "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc)))
-
+ _msg = ("The step name {0} with return code {1} for the submitted job is "
+ "greater than the value set for option 'max_rc' {2}. "
+ "Increase the value for 'max_rc' otherwise this job submission "
+ "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc)))
+ ret_code.update({"msg_txt": _msg})
+ result["stderr"] = _msg
+ raise Exception(_msg)
# If there is NO exception raised it means that max_rc is larger than the
# actual RC from the submitted job. In this case, the ansible changed status
# should NOT be 'changed=true' even though the user did override the return code;
# a non-zero return code means the job did not change anything, so set it as
# result["changed"]=False,
- if job_rc != 0:
+ if max_rc and job_rc > max_rc:
+ return False
+ elif job_rc != 0 and max_rc is None:
return False
return True
diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py
index 6536509fd..0988ef2d4 100644
--- a/plugins/modules/zos_lineinfile.py
+++ b/plugins/modules/zos_lineinfile.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -229,15 +229,14 @@
src: /tmp/src/somefile
regexp: '^(.*)User(\d+)m(.*)$'
line: '\1APPUser\3'
- backrefs: yes
+ backrefs: true
- name: Add a line to a member while a task is in execution
zos_lineinfile:
src: SOME.PARTITIONED.DATA.SET(DATA)
insertafter: EOF
line: 'Should be a working test now'
- force: True
-
+ force: true
"""
RETURN = r"""
@@ -275,18 +274,19 @@
sample: /path/to/file.txt.2015-02-03@04:15~
"""
import json
+import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
better_arg_parser, data_set, backup as Backup)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
try:
from zoautil_py import datasets
except Exception:
- datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
# supported data set types
@@ -294,40 +294,52 @@
def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs, force):
- """Replace a line with the matching regex pattern
- Insert a line before/after the matching pattern
- Insert a line at BOF/EOF
-
- Arguments:
- src: {str} -- The z/OS USS file or data set to modify.
- line: {str} -- The line to insert/replace into the src.
- regexp: {str} -- The regular expression to look for in every line of the src.
- If regexp matches, ins_aft/ins_bef will be ignored.
- ins_aft: {str} -- Insert the line after matching '*regex*' pattern or EOF.
- choices:
- - EOF
- - '*regex*'
- ins_bef: {str} -- Insert the line before matching '*regex*' pattern or BOF.
- choices:
- - BOF
- - '*regex*'
- encoding: {str} -- Encoding of the src.
- first_match: {bool} -- Take the first matching regex pattern.
- backrefs: {bool} -- Back reference
- force: {bool} -- force for modify a member part of a task in execution
-
- Returns:
- str -- Information in JSON format. keys:
- cmd: {str} -- dsed shell command
- found: {int} -- Number of matching regex pattern
- changed: {bool} -- Indicates if the source was modified.
+ """Replace a line with the matching regex pattern.
+ Insert a line before/after the matching pattern.
+ Insert a line at BOF/EOF.
+
+ Parameters
+ ----------
+ src : str
+ The z/OS USS file or data set to modify.
+ line : str
+ The line to insert/replace into the src.
+ regexp : str
+ The regular expression to look for in every line of the src.
+ If regexp matches, ins_aft/ins_bef will be ignored.
+ ins_aft : str
+ Insert the line after matching '*regex*' pattern or EOF.
+ choices:
+ - EOF
+ - '*regex*'
+ ins_bef : str
+ Insert the line before matching '*regex*' pattern or BOF.
+ choices:
+ - BOF
+ - '*regex*'
+ encoding : str
+ Encoding of the src.
+ first_match : bool
+ Take the first matching regex pattern.
+ backrefs : bool
+ Back reference.
+ force : bool
+ Force modification of a member even when it is part of a task in execution.
+
+ Returns
+ -------
+ str
+ Information in JSON format. keys:
+ cmd {str} -- dsed shell command
+ found {int} -- Number of matching regex pattern
+ changed {bool} -- Indicates if the source was modified.
"""
return datasets.lineinfile(
src,
line,
regex=regexp,
- ins_aft=ins_aft,
- ins_bef=ins_bef,
+ insert_after=ins_aft,
+ insert_before=ins_bef,
encoding=encoding,
first_match=first_match,
backref=backrefs,
@@ -338,26 +350,46 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs
def absent(src, line, regexp, encoding, force):
- """Delete lines with matching regex pattern
-
- Arguments:
- src: {str} -- The z/OS USS file or data set to modify.
- line: {str} -- The line to be deleted in the src. If line matches,
- regexp will be ignored.
- regexp: {str} -- The regular expression to look for in every line of the src.
- encoding: {str} -- Encoding of the src.
- force: {bool} -- force for modify a member part of a task in execution
-
- Returns:
- str -- Information in JSON format. keys:
- cmd: {str} -- dsed shell command
- found: {int} -- Number of matching regex pattern
- changed: {bool} -- Indicates if the source was modified.
+ """Delete lines with matching regex pattern.
+
+ Parameters
+ ----------
+ src : str
+ The z/OS USS file or data set to modify.
+ line : str
+ The line to be deleted in the src. If line matches,
+ regexp will be ignored.
+ regexp : str
+ The regular expression to look for in every line of the src.
+ encoding : str
+ Encoding of the src.
+ force : bool
+ Force modification of a member even when it is part of a task in execution.
+
+ Returns
+ -------
+ str
+ Information in JSON format. keys:
+ cmd {str} -- dsed shell command
+ found {int} -- Number of matching regex pattern
+ changed {bool} -- Indicates if the source was modified.
"""
return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True, force=force)
def quotedString(string):
+ """Add escape if string was quoted.
+
+ Parameters
+ ----------
+ string : str
+ Given string.
+
+ Returns
+ -------
+ str
+ The string with the quote marks replaced.
+ """
# add escape if string was quoted
if not isinstance(string, str):
return string
@@ -365,6 +397,27 @@ def quotedString(string):
def main():
+ """Initialize the module.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ regexp is required with backrefs=true.
+ fail_json
+ line is required with state=present.
+ fail_json
+ One of line or regexp is required with state=absent.
+ fail_json
+ Source does not exist.
+ fail_json
+ Data set type is NOT supported.
+ fail_json
+ Creating backup has failed.
+ fail_json
+ dsed return content is NOT in json format.
+ """
module_args = dict(
src=dict(
type='str',
@@ -488,36 +541,36 @@ def main():
stdout = return_content.stdout_response
stderr = return_content.stderr_response
rc = return_content.rc
+ stdout = stdout.replace('/c\\', '/c\\\\')
+ stdout = stdout.replace('/a\\', '/a\\\\')
+ stdout = stdout.replace('/i\\', '/i\\\\')
+ stdout = stdout.replace('$ a\\', '$ a\\\\')
+ stdout = stdout.replace('1 i\\', '1 i\\\\')
+ stdout = stdout.replace('/d', '\\\\d')
+ if line:
+ stdout = stdout.replace(line, quotedString(line))
+ if regexp:
+ stdout = stdout.replace(regexp, quotedString(regexp))
+ if ins_aft:
+ stdout = stdout.replace(ins_aft, quotedString(ins_aft))
+ if ins_bef:
+ stdout = stdout.replace(ins_bef, quotedString(ins_bef))
try:
- # change the return string to be loadable by json.loads()
- stdout = stdout.replace('/c\\', '/c\\\\')
- stdout = stdout.replace('/a\\', '/a\\\\')
- stdout = stdout.replace('/i\\', '/i\\\\')
- stdout = stdout.replace('$ a\\', '$ a\\\\')
- stdout = stdout.replace('1 i\\', '1 i\\\\')
- if line:
- stdout = stdout.replace(line, quotedString(line))
- if regexp:
- stdout = stdout.replace(regexp, quotedString(regexp))
- if ins_aft:
- stdout = stdout.replace(ins_aft, quotedString(ins_aft))
- if ins_bef:
- stdout = stdout.replace(ins_bef, quotedString(ins_bef))
- # Try to extract information from return_content
ret = json.loads(stdout)
- result['cmd'] = ret['cmd']
- result['changed'] = ret['changed']
- result['found'] = ret['found']
- # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case
- # That information will be given with 'changed' and 'found'
- if len(stderr):
- result['stderr'] = str(stderr)
- result['rc'] = rc
except Exception:
messageDict = dict(msg="dsed return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc)
if result.get('backup_name'):
messageDict['backup_name'] = result['backup_name']
module.fail_json(**messageDict)
+
+ result['cmd'] = ret['cmd']
+ result['changed'] = ret['changed']
+ result['found'] = ret['found']
+ # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case
+ # That information will be given with 'changed' and 'found'
+ if len(stderr):
+ result['stderr'] = str(stderr)
+ result['rc'] = rc
module.exit_json(**result)
diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py
index 3f4c642f3..7b4b04654 100644
--- a/plugins/modules/zos_mount.py
+++ b/plugins/modules/zos_mount.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020 - 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -48,13 +48,13 @@
description:
- The type of file system that will be mounted.
- The physical file systems data set format to perform the logical mount.
- - The I(fs_type) is required to be uppercase.
+ - The I(fs_type) is required to be lowercase.
type: str
choices:
- - HFS
- - ZFS
- - NFS
- - TFS
+ - hfs
+ - zfs
+ - nfs
+ - tfs
required: True
state:
description:
@@ -168,33 +168,33 @@
file hierarchy).
type: str
choices:
- - DRAIN
- - FORCE
- - IMMEDIATE
- - NORMAL
- - REMOUNT
- - RESET
+ - drain
+ - force
+ - immediate
+ - normal
+ - remount
+ - reset
required: False
- default: NORMAL
+ default: normal
mount_opts:
description:
- Options available to the mount.
- - If I(mount_opts=RO) on a mounted/remount, mount is performed
+ - If I(mount_opts=ro) on a mounted/remount, mount is performed
read-only.
- - If I(mount_opts=SAME) and (unmount_opts=REMOUNT), mount is opened
+ - If I(mount_opts=same) and (unmount_opts=remount), mount is opened
in the same mode as previously opened.
- - If I(mount_opts=NOWAIT), mount is performed asynchronously.
- - If I(mount_opts=NOSECURITY), security checks are not enforced for
+ - If I(mount_opts=nowait), mount is performed asynchronously.
+ - If I(mount_opts=nosecurity), security checks are not enforced for
files in this file system.
type: str
choices:
- - RO
- - RW
- - SAME
- - NOWAIT
- - NOSECURITY
+ - ro
+ - rw
+ - same
+ - nowait
+ - nosecurity
required: False
- default: RW
+ default: rw
src_params:
description:
- Specifies a parameter string to be passed to the file system type.
@@ -206,15 +206,15 @@
description:
- If present, tags get written to any untagged file.
- When the file system is unmounted, the tags are lost.
- - If I(tag_untagged=NOTEXT) none of the untagged files in the file system are
+ - If I(tag_untagged=notext) none of the untagged files in the file system are
automatically converted during file reading and writing.
- - If I(tag_untagged=TEXT) each untagged file is implicitly marked as
+ - If I(tag_untagged=text) each untagged file is implicitly marked as
containing pure text data that can be converted.
- If this flag is used, use of tag_ccsid is encouraged.
type: str
choices:
- - TEXT
- - NOTEXT
+ - text
+ - notext
required: False
tag_ccsid:
description:
@@ -271,23 +271,23 @@
AUTOMOVE where the file system will be randomly moved to another system
(no system list used).
- >
- I(automove=AUTOMOVE) indicates that ownership of the file system can be
+ I(automove=automove) indicates that ownership of the file system can be
automatically moved to another system participating in a shared file system.
- >
- I(automove=NOAUTOMOVE) prevents movement of the file system's ownership in some situations.
+ I(automove=noautomove) prevents movement of the file system's ownership in some situations.
- >
- I(automove=UNMOUNT) allows the file system to be unmounted in some situations.
+ I(automove=unmount) allows the file system to be unmounted in some situations.
type: str
choices:
- - AUTOMOVE
- - NOAUTOMOVE
- - UNMOUNT
+ - automove
+ - noautomove
+ - unmount
required: False
- default: AUTOMOVE
+ default: automove
automove_list:
description:
- >
- If(automove=AUTOMOVE), this option will be checked.
+ If(automove=automove), this option will be checked.
- >
This specifies the list of servers to include or exclude as destinations.
- >
@@ -317,14 +317,14 @@
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
- name: Unmount a filesystem.
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: unmounted
unmount_opts: REMOUNT
opts: same
@@ -333,7 +333,7 @@
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
mount_opts: RO
@@ -341,37 +341,37 @@
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
persistent:
- data_store: SYS1.PARMLIB(BPXPRMAA)
- comment: For Tape2 project
+ data_store: SYS1.PARMLIB(BPXPRMAA)
+ comment: For Tape2 project
- name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB.
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
persistent:
- data_store: SYS1.PARMLIB(BPXPRMAA)
- backup: Yes
- backup_name: SYS1.PARMLIB(BPXPRMAB)
- comment: For Tape2 project
+ data_store: SYS1.PARMLIB(BPXPRMAA)
+ backup: true
+ backup_name: SYS1.PARMLIB(BPXPRMAB)
+ comment: For Tape2 project
- name: Mount a filesystem ignoring uid/gid values.
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
- allow_uid: no
+ allow_uid: false
- name: Mount a filesystem asynchronously (don't wait for completion).
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
opts: nowait
@@ -379,7 +379,7 @@
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
mount_opts: NOSECURITY
@@ -387,7 +387,7 @@
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
automove: AUTOMOVE
automove_list: I,DEV1,DEV2,DEV3,DEV9
@@ -396,11 +396,10 @@
zos_mount:
src: SOMEUSER.VVV.ZFS
path: /u/omvsadm/core
- fs_type: ZFS
+ fs_type: zfs
state: mounted
automove: AUTOMOVE
automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7
-
"""
RETURN = r"""
@@ -564,6 +563,31 @@
def mt_backupOper(module, src, backup, tmphlq=None):
+ """Makes a backup of the source.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ AnsibleModule.
+ src : str
+ Source USS file or MVS data set.
+ backup : str
+ Name for the backup.
+ tmphlq : str
+ HLQ to be used for the backup data set.
+
+ Returns
+ -------
+ str
+ Backup name.
+
+ Raises
+ ------
+ fail_json
+ Creating backup has failed.
+ fail_json
+ Data set type is NOT supported.
+ """
# analyze the file type
ds_utils = data_set.DataSetUtils(src)
file_type = ds_utils.ds_type()
@@ -590,11 +614,24 @@ def mt_backupOper(module, src, backup, tmphlq=None):
def swap_text(original, adding, removing):
- """
- swap_text returns original after removing blocks matching removing,
- and adding the adding param
- original now should be a list of lines without newlines
- return is the consolidated file value
+ """swap_text returns original after removing blocks matching removing,
+ and adding the adding param.
+ original now should be a list of lines without newlines.
+ return is the consolidated file value.
+
+ Parameters
+ ----------
+ original : str
+ Text to modify.
+ adding : str
+ Lines to add.
+ removing : str
+ Lines to delete if matched.
+
+ Returns
+ -------
+ str
+ The consolidated file value.
"""
content_lines = original
@@ -658,6 +695,37 @@ def swap_text(original, adding, removing):
def run_module(module, arg_def):
+ """Initialize module.
+
+ Parameters
+ ----------
+ module : AnsibleModule
+ The AnsibleModule instance.
+ arg_def : dict
+ Argument definitions used to validate the parameters.
+
+ Returns
+ -------
+ dict
+ The result arguments from the module run.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ Mount source either is not cataloged or does not exist.
+ fail_json
+ Exception encountered during directory creation.
+ fail_json
+ Mount destination doesn't exist.
+ fail_json
+ Checking filesystem list failed with error.
+ fail_json
+ Exception encountered when running unmount.
+ fail_json
+ Exception occurred when running mount.
+ fail_json
+ Persistent data set is either not cataloged or does not exist.
+ """
# ********************************************************************
# Verify the validity of module args. BetterArgParser raises ValueError
# when a parameter fails its validation check
@@ -854,7 +922,7 @@ def run_module(module, arg_def):
src, path, fs_type
)
)
- if "RO" in mount_opts:
+ if "ro" in mount_opts:
subcmd = "READ"
else:
subcmd = "RDWR"
@@ -882,14 +950,14 @@ def run_module(module, arg_def):
fullcmd = fullcmd + " NOSETUID"
parmtext = parmtext + "\n NOSETUID"
- if "NOWAIT" in mount_opts:
+ if "nowait" in mount_opts:
fullcmd = fullcmd + " NOWAIT"
parmtext = parmtext + "\n NOWAIT"
else:
fullcmd = fullcmd + " WAIT"
parmtext = parmtext + "\n WAIT"
- if "NOSECURITY" in mount_opts:
+ if "nosecurity" in mount_opts:
fullcmd = fullcmd + " NOSECURITY"
parmtext = parmtext + "\n NOSECURITY"
else:
@@ -1042,6 +1110,8 @@ def run_module(module, arg_def):
def main():
+ """Initialize module when executed as main.
+ """
global module
module = AnsibleModule(
@@ -1051,10 +1121,10 @@ def main():
fs_type=dict(
type="str",
choices=[
- "HFS",
- "ZFS",
- "NFS",
- "TFS",
+ "hfs",
+ "zfs",
+ "nfs",
+ "tfs",
],
required=True,
),
@@ -1079,27 +1149,27 @@ def main():
),
unmount_opts=dict(
type="str",
- default="NORMAL",
- choices=["DRAIN", "FORCE", "IMMEDIATE", "NORMAL", "REMOUNT", "RESET"],
+ default="normal",
+ choices=["drain", "force", "immediate", "normal", "remount", "reset"],
required=False,
),
mount_opts=dict(
type="str",
- default="RW",
- choices=["RO", "RW", "SAME", "NOWAIT", "NOSECURITY"],
+ default="rw",
+ choices=["ro", "rw", "same", "nowait", "nosecurity"],
required=False,
),
src_params=dict(type="str", required=False),
tag_untagged=dict(
- type="str", choices=["TEXT", "NOTEXT"], required=False
+ type="str", choices=["text", "notext"], required=False
),
tag_ccsid=dict(type="int", required=False),
allow_uid=dict(type="bool", default=True, required=False),
sysname=dict(type="str", required=False),
automove=dict(
type="str",
- default="AUTOMOVE",
- choices=["AUTOMOVE", "NOAUTOMOVE", "UNMOUNT"],
+ default="automove",
+ choices=["automove", "noautomove", "unmount"],
required=False,
),
automove_list=dict(type="str", required=False),
@@ -1114,10 +1184,10 @@ def main():
fs_type=dict(
arg_type="str",
choices=[
- "HFS",
- "ZFS",
- "NFS",
- "TFS",
+ "hfs",
+ "zfs",
+ "nfs",
+ "tfs",
],
required=True,
),
@@ -1139,27 +1209,27 @@ def main():
),
unmount_opts=dict(
arg_type="str",
- default="NORMAL",
- choices=["DRAIN", "FORCE", "IMMEDIATE", "NORMAL", "REMOUNT", "RESET"],
+ default="normal",
+ choices=["drain", "force", "immediate", "normal", "remount", "reset"],
required=False,
),
mount_opts=dict(
arg_type="str",
- default="RW",
- choices=["RO", "RW", "SAME", "NOWAIT", "NOSECURITY"],
+ default="rw",
+ choices=["ro", "rw", "same", "nowait", "nosecurity"],
required=False,
),
src_params=dict(arg_type="str", default="", required=False),
tag_untagged=dict(
- arg_type="str", choices=["TEXT", "NOTEXT"], required=False
+ arg_type="str", choices=["text", "notext"], required=False
),
tag_ccsid=dict(arg_type="int", required=False),
allow_uid=dict(arg_type="bool", default=True, required=False),
sysname=dict(arg_type="str", default="", required=False),
automove=dict(
arg_type="str",
- default="AUTOMOVE",
- choices=["AUTOMOVE", "NOAUTOMOVE", "UNMOUNT"],
+ default="automove",
+ choices=["automove", "noautomove", "unmount"],
required=False,
),
automove_list=dict(arg_type="str", default="", required=False),
diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py
index 502d2ead7..25bfe56dc 100644
--- a/plugins/modules/zos_mvs_raw.py
+++ b/plugins/modules/zos_mvs_raw.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -125,9 +125,7 @@
choices:
- delete
- keep
- - catlg
- catalog
- - uncatlg
- uncatalog
disposition_abnormal:
description:
@@ -138,32 +136,30 @@
choices:
- delete
- keep
- - catlg
- catalog
- - uncatlg
- uncatalog
reuse:
description:
- - Determines if a data set should be reused if I(disposition=NEW) and if a data set with a matching name already exists.
+ - Determines if a data set should be reused if I(disposition=new) and if a data set with a matching name already exists.
- If I(reuse=true), I(disposition) will be automatically switched to C(SHR).
- If I(reuse=false), and a data set with a matching name already exists, allocation will fail.
- Mutually exclusive with I(replace).
- - I(reuse) is only considered when I(disposition=NEW)
+ - I(reuse) is only considered when I(disposition=new)
type: bool
default: false
replace:
description:
- - Determines if a data set should be replaced if I(disposition=NEW) and a data set with a matching name already exists.
+ - Determines if a data set should be replaced if I(disposition=new) and a data set with a matching name already exists.
- If I(replace=true), the original data set will be deleted, and a new data set created.
- If I(replace=false), and a data set with a matching name already exists, allocation will fail.
- Mutually exclusive with I(reuse).
- - I(replace) is only considered when I(disposition=NEW)
+ - I(replace) is only considered when I(disposition=new)
- I(replace) will result in loss of all data in the original data set unless I(backup) is specified.
type: bool
default: false
backup:
description:
- - Determines if a backup should be made of an existing data set when I(disposition=NEW), I(replace=true),
+ - Determines if a backup should be made of an existing data set when I(disposition=new), I(replace=true),
and a data set with the desired name is found.
- I(backup) is only used when I(replace=true).
type: bool
@@ -260,8 +256,8 @@
description:
- How the label for the key encrypting key specified by
I(label) is encoded by the Encryption Key Manager.
- - I(encoding) can either be set to C(L) for label encoding,
- or C(H) for hash encoding.
+ - I(encoding) can either be set to C(l) for label encoding,
+ or C(h) for hash encoding.
- Maps to KEYCD1 on z/OS.
type: str
required: true
@@ -289,8 +285,8 @@
description:
- How the label for the key encrypting key specified by
I(label) is encoded by the Encryption Key Manager.
- - I(encoding) can either be set to C(L) for label encoding,
- or C(H) for hash encoding.
+ - I(encoding) can either be set to C(l) for label encoding,
+ or C(h) for hash encoding.
- Maps to KEYCD2 on z/OS.
type: str
required: true
@@ -316,7 +312,7 @@
- The logical record length. (e.g. C(80)).
- For variable data sets, the length must include the 4-byte prefix area.
- "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0."
- - Valid values are (1-32760 for non-vsam, 1-32761 for vsam).
+ - Valid values are (1-32760 for non-VSAM, 1-32761 for VSAM).
- Maps to LRECL on z/OS.
type: int
required: false
@@ -746,9 +742,7 @@
choices:
- delete
- keep
- - catlg
- catalog
- - uncatlg
- uncatalog
disposition_abnormal:
description:
@@ -759,32 +753,30 @@
choices:
- delete
- keep
- - catlg
- catalog
- - uncatlg
- uncatalog
reuse:
description:
- - Determines if data set should be reused if I(disposition=NEW) and a data set with matching name already exists.
+ - Determines if data set should be reused if I(disposition=new) and a data set with matching name already exists.
- If I(reuse=true), I(disposition) will be automatically switched to C(SHR).
- If I(reuse=false), and a data set with a matching name already exists, allocation will fail.
- Mutually exclusive with I(replace).
- - I(reuse) is only considered when I(disposition=NEW)
+ - I(reuse) is only considered when I(disposition=new)
type: bool
default: false
replace:
description:
- - Determines if data set should be replaced if I(disposition=NEW) and a data set with matching name already exists.
+ - Determines if data set should be replaced if I(disposition=new) and a data set with matching name already exists.
- If I(replace=true), the original data set will be deleted, and a new data set created.
- If I(replace=false), and a data set with a matching name already exists, allocation will fail.
- Mutually exclusive with I(reuse).
- - I(replace) is only considered when I(disposition=NEW)
+ - I(replace) is only considered when I(disposition=new)
- I(replace) will result in loss of all data in the original data set unless I(backup) is specified.
type: bool
default: false
backup:
description:
- - Determines if a backup should be made of existing data set when I(disposition=NEW), I(replace=true),
+ - Determines if a backup should be made of existing data set when I(disposition=new), I(replace=true),
and a data set with the desired name is found.
- I(backup) is only used when I(replace=true).
type: bool
@@ -881,8 +873,8 @@
description:
- How the label for the key encrypting key specified by
I(label) is encoded by the Encryption Key Manager.
- - I(encoding) can either be set to C(L) for label encoding,
- or C(H) for hash encoding.
+ - I(encoding) can either be set to C(l) for label encoding,
+ or C(h) for hash encoding.
- Maps to KEYCD1 on z/OS.
type: str
required: true
@@ -910,8 +902,8 @@
description:
- How the label for the key encrypting key specified by
I(label) is encoded by the Encryption Key Manager.
- - I(encoding) can either be set to C(L) for label encoding,
- or C(H) for hash encoding.
+ - I(encoding) can either be set to C(l) for label encoding,
+ or C(h) for hash encoding.
- Maps to KEYCD2 on z/OS.
type: str
required: true
@@ -988,7 +980,7 @@
path:
description:
- The path to an existing UNIX file.
- - Or provide the path to an new created UNIX file when I(status_group=OCREAT).
+ - Or provide the path to a newly created UNIX file when I(status_group=ocreat).
- The provided path must be absolute.
required: true
type: str
@@ -1299,7 +1291,7 @@
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- reuse: yes
+ reuse: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1323,7 +1315,7 @@
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- reuse: yes
+ reuse: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1368,7 +1360,7 @@
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- reuse: yes
+ reuse: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1396,8 +1388,8 @@
dd_name: sysprint
data_set_name: mypgm.output.ds
disposition: new
- replace: yes
- backup: yes
+ replace: true
+ backup: true
type: seq
space_primary: 5
space_secondary: 1
@@ -1468,7 +1460,7 @@
- name: Take a set of data sets and write them to an archive.
zos_mvs_raw:
program_name: adrdssu
- auth: yes
+ auth: true
dds:
- dd_data_set:
dd_name: archive
@@ -1484,7 +1476,7 @@
- name: Merge two sequential data sets and write them to new data set
zos_mvs_raw:
program_name: sort
- auth: no
+ auth: false
parm: "MSGPRT=CRITICAL,LIST"
dds:
- dd_data_set:
@@ -1515,7 +1507,7 @@
files.
zos_mvs_raw:
pgm: idcams
- auth: yes
+ auth: true
dds:
- dd_concat:
dd_name: sysprint
@@ -1532,57 +1524,56 @@
dd_name: sysin
content: " LISTCAT ENTRIES('SYS1.*')"
-- name: Drop the contents of input dataset into output dataset
- using REPRO command.
+- name: Drop the contents of input dataset into output dataset using REPRO command.
zos_mvs_raw:
pgm: idcams
- auth: yes
+ auth: true
dds:
- - dd_data_set:
- dd_name: INPUT
- data_set_name: myhlq.ds1.input
- - dd_data_set:
- dd_name: OUTPUT
- data_set_name: myhlq.ds1.output
- - dd_input:
- dd_name: sysin
- content: |
+ - dd_data_set:
+ dd_name: INPUT
+ data_set_name: myhlq.ds1.input
+ - dd_data_set:
+ dd_name: OUTPUT
+ data_set_name: myhlq.ds1.output
+ - dd_input:
+ dd_name: sysin
+ content: |
" REPRO -
INFILE(INPUT) -
OUTFILE(OUTPUT)"
- - dd_output:
- dd_name: sysprint
- return_content:
- type: text
-
- - name: Define a cluster using a literal block style indicator
- with a 2 space indentation.
- zos_mvs_raw:
- program_name: idcams
- auth: yes
- dds:
- - dd_output:
- dd_name: sysprint
- return_content:
- type: text
- - dd_input:
- dd_name: sysin
- content: |2
- DEFINE CLUSTER -
- (NAME(ANSIBLE.TEST.VSAM) -
- CYL(10 10) -
- FREESPACE(20 20) -
- INDEXED -
- KEYS(32 0) -
- NOERASE -
- NONSPANNED -
- NOREUSE -
- SHAREOPTIONS(3 3) -
- SPEED -
- UNORDERED -
- RECORDSIZE(4086 32600) -
- VOLUMES(222222) -
- UNIQUE)
+ - dd_output:
+ dd_name: sysprint
+ return_content:
+ type: text
+
+- name: Define a cluster using a literal block style indicator
+ with a 2 space indentation.
+ zos_mvs_raw:
+ program_name: idcams
+ auth: true
+ dds:
+ - dd_output:
+ dd_name: sysprint
+ return_content:
+ type: text
+ - dd_input:
+ dd_name: sysin
+ content: |2
+ DEFINE CLUSTER -
+ (NAME(ANSIBLE.TEST.VSAM) -
+ CYL(10 10) -
+ FREESPACE(20 20) -
+ INDEXED -
+ KEYS(32 0) -
+ NOERASE -
+ NONSPANNED -
+ NOREUSE -
+ SHAREOPTIONS(3 3) -
+ SPEED -
+ UNORDERED -
+ RECORDSIZE(4086 32600) -
+ VOLUMES(222222) -
+ UNIQUE)
"""
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
@@ -1628,10 +1619,6 @@
backups = []
-# Use of global tmphlq to keep coherent classes definitions
-g_tmphlq = ""
-
-
def run_module():
"""Executes all module-related functions.
@@ -1645,11 +1632,11 @@ def run_module():
disposition=dict(type="str", choices=["new", "shr", "mod", "old"]),
disposition_normal=dict(
type="str",
- choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"],
+ choices=["delete", "keep", "catalog", "uncatalog"],
),
disposition_abnormal=dict(
type="str",
- choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"],
+ choices=["delete", "keep", "catalog", "uncatalog"],
),
space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]),
space_primary=dict(type="int"),
@@ -1839,8 +1826,7 @@ def run_module():
if not module.check_mode:
try:
parms = parse_and_validate_args(module.params)
- global g_tmphlq
- g_tmphlq = parms.get("tmp_hlq")
+ tmphlq = parms.get("tmp_hlq")
dd_statements = build_dd_statements(parms)
program = parms.get("program_name")
program_parm = parms.get("parm")
@@ -1852,6 +1838,7 @@ def run_module():
dd_statements=dd_statements,
authorized=authorized,
verbose=verbose,
+ tmp_hlq=tmphlq,
)
if program_response.rc != 0 and program_response.stderr:
raise ZOSRawError(
@@ -1888,11 +1875,11 @@ def parse_and_validate_args(params):
disposition=dict(type="str", choices=["new", "shr", "mod", "old"]),
disposition_normal=dict(
type="str",
- choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"],
+ choices=["delete", "keep", "catalog", "uncatalog"],
),
disposition_abnormal=dict(
type="str",
- choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"],
+ choices=["delete", "keep", "catalog", "uncatalog"],
),
space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]),
space_primary=dict(type="int"),
@@ -2408,7 +2395,7 @@ def build_dd_statements(parms):
dd_statements = []
for dd in parms.get("dds"):
dd_name = get_dd_name(dd)
- dd = set_extra_attributes_in_dd(dd)
+ dd = set_extra_attributes_in_dd(dd, parms)
data_definition = build_data_definition(dd)
if data_definition is None:
raise ValueError("No valid data definition found.")
@@ -2444,26 +2431,27 @@ def get_dd_name(dd):
return dd_name
-def set_extra_attributes_in_dd(dd):
+def set_extra_attributes_in_dd(dd, parms):
"""
- Set any extra attributes in dds like in global g_tmphlq.
+ Set any extra attributes in the DDs, such as tmp_hlq taken from the module parms.
Args:
dd (dict): A single DD parm as specified in module parms.
+ parms (dict): The module parms, used to retrieve tmp_hlq.
Returns:
dd (dict): A single DD parm as specified in module parms.
"""
+ tmphlq = parms.get("tmp_hlq")
if dd.get("dd_data_set"):
- dd.get("dd_data_set")["tmphlq"] = g_tmphlq
+ dd.get("dd_data_set")["tmphlq"] = tmphlq
elif dd.get("dd_input"):
- dd.get("dd_input")["tmphlq"] = g_tmphlq
+ dd.get("dd_input")["tmphlq"] = tmphlq
elif dd.get("dd_output"):
- dd.get("dd_output")["tmphlq"] = g_tmphlq
+ dd.get("dd_output")["tmphlq"] = tmphlq
elif dd.get("dd_vio"):
- dd.get("dd_vio")["tmphlq"] = g_tmphlq
+ dd.get("dd_vio")["tmphlq"] = tmphlq
elif dd.get("dd_concat"):
for single_dd in dd.get("dd_concat").get("dds", []):
- set_extra_attributes_in_dd(single_dd)
+ set_extra_attributes_in_dd(single_dd, parms)
return dd
@@ -2572,6 +2560,7 @@ def __init__(
"""
self.backup = None
self.return_content = ReturnContent(**(return_content or {}))
+ self.tmphlq = tmphlq
primary_unit = space_type
secondary_unit = space_type
key_label1 = None
@@ -2698,7 +2687,6 @@ def __init__(
)
-# TODO: potentially extend the available parameters to end user
class RawInputDefinition(InputDefinition):
"""Wrapper around InputDefinition to contain information about
desired return contents.
@@ -2707,7 +2695,7 @@ class RawInputDefinition(InputDefinition):
InputDefinition (InputDefinition): Input DD data type to be used in a DDStatement.
"""
- def __init__(self, content="", return_content=None, **kwargs):
+ def __init__(self, content="", return_content=None, tmphlq="", **kwargs):
"""Initialize RawInputDefinition
Args:
@@ -2715,7 +2703,7 @@ def __init__(self, content="", return_content=None, **kwargs):
return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}.
"""
self.return_content = ReturnContent(**(return_content or {}))
- super().__init__(content=content)
+ super().__init__(content=content, tmphlq=tmphlq)
class RawOutputDefinition(OutputDefinition):
@@ -2726,7 +2714,7 @@ class RawOutputDefinition(OutputDefinition):
OutputDefinition (OutputDefinition): Output DD data type to be used in a DDStatement.
"""
- def __init__(self, return_content=None, **kwargs):
+ def __init__(self, return_content=None, tmphlq="", **kwargs):
"""Initialize RawOutputDefinition
Args:
@@ -2734,7 +2722,7 @@ def __init__(self, return_content=None, **kwargs):
return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}.
"""
self.return_content = ReturnContent(**(return_content or {}))
- super().__init__()
+ super().__init__(tmphlq=tmphlq)
class ReturnContent(object):
@@ -2761,28 +2749,6 @@ def __init__(self, type=None, src_encoding=None, response_encoding=None):
self.response_encoding = response_encoding
-def to_bytes(size, unit):
- """Convert sizes of various units to bytes.
-
- Args:
- size (int): The size to convert.
- unit (str): The unit of size.
-
- Returns:
- int: The size converted to bytes.
- """
- num_bytes = 0
- if unit == "b":
- num_bytes = size
- elif unit == "k":
- num_bytes = size * 1024
- elif unit == "m":
- num_bytes = size * 1048576
- elif unit == "g":
- num_bytes = size * 1073741824
- return num_bytes
-
-
def rename_parms(parms, name_map):
"""Rename parms based on a provided dictionary.
@@ -2839,7 +2805,7 @@ def data_set_exists(name, volumes=None):
def run_zos_program(
- program, parm="", dd_statements=None, authorized=False, verbose=False
+ program, parm="", dd_statements=None, authorized=False, verbose=False, tmp_hlq=None
):
"""Run a program on z/OS.
@@ -2848,6 +2814,7 @@ def run_zos_program(
parm (str, optional): Additional argument string if required. Defaults to "".
dd_statements (list[DDStatement], optional): DD statements to allocate for the program. Defaults to [].
authorized (bool, optional): Determines if program will execute as an authorized user. Defaults to False.
+ tmp_hlq (str, optional): HLQ to use for temporary data sets, overriding the default. Defaults to None.
Returns:
MVSCmdResponse: Holds the response information for program execution.
@@ -2857,11 +2824,11 @@ def run_zos_program(
response = None
if authorized:
response = MVSCmd.execute_authorized(
- pgm=program, parm=parm, dds=dd_statements, verbose=verbose
+ pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq
)
else:
response = MVSCmd.execute(
- pgm=program, parm=parm, dds=dd_statements, verbose=verbose
+ pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq
)
return response
diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py
index 8b03fd2e4..f37471397 100644
--- a/plugins/modules/zos_operator.py
+++ b/plugins/modules/zos_operator.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019 - 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -171,9 +171,13 @@
opercmd = ZOAUImportError(traceback.format_exc())
-def execute_command(operator_cmd, timeout=1, *args, **kwargs):
+def execute_command(operator_cmd, timeout_s=1, *args, **kwargs):
+
+ # As of ZOAU v1.3.0, the opercmd timeout is measured in centiseconds, therefore:
+ timeout_c = 100 * timeout_s
+
start = timer()
- response = opercmd.execute(operator_cmd, timeout, *args, **kwargs)
+ response = opercmd.execute(operator_cmd, timeout=timeout_c, *args, **kwargs)
end = timer()
rc = response.rc
stdout = response.stdout_response
@@ -287,7 +291,7 @@ def run_operator_command(params):
kwargs.update({"wait": True})
args = []
- rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs)
+ rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs)
if rc > 0:
message = "\nOut: {0}\nErr: {1}\nRan: {2}".format(stdout, stderr, cmdtxt)
diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py
index 022708692..ad6725103 100644
--- a/plugins/modules/zos_operator_action_query.py
+++ b/plugins/modules/zos_operator_action_query.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019 - 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -121,7 +121,7 @@
system: mv29
message_filter:
filter: ^.*IMS.*$
- use_regex: yes
+ use_regex: true
"""
RETURN = r"""
@@ -219,11 +219,16 @@
from ansible.module_utils.basic import AnsibleModule
import re
+import traceback
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
BetterArgParser,
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
+)
+
+from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
+ zoau_version_checker
)
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
@@ -233,10 +238,19 @@
try:
from zoautil_py import opercmd
except Exception:
- opercmd = MissingZOAUImport()
+ opercmd = ZOAUImportError(traceback.format_exc())
def run_module():
+ """Initialize module.
+
+ Raises
+ ------
+ fail_json
+ A non-zero return code was received while querying the operator.
+ fail_json
+ An unexpected error occurred.
+ """
module_args = dict(
system=dict(type="str", required=False),
message_id=dict(type="str", required=False),
@@ -272,7 +286,7 @@ def run_module():
cmdtxt = "d r,a,s"
- cmd_result_a = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs)
+ cmd_result_a = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs)
if cmd_result_a.rc > 0:
module.fail_json(
@@ -287,7 +301,7 @@ def run_module():
cmdtxt = "d r,a,jn"
- cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs)
+ cmd_result_b = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs)
if cmd_result_b.rc > 0:
module.fail_json(
@@ -316,6 +330,18 @@ def run_module():
def parse_params(params):
+ """Parse parameters using BetterArgParser.
+
+ Parameters
+ ----------
+ params : dict
+ Parameters to parse.
+
+ Returns
+ -------
+ dict
+ Parsed parameters.
+ """
arg_defs = dict(
system=dict(arg_type=system_type, required=False),
message_id=dict(arg_type=message_id_type, required=False),
@@ -328,24 +354,85 @@ def parse_params(params):
def system_type(arg_val, params):
+ """System type.
+
+ Parameters
+ ----------
+ arg_val : str
+ Argument to validate.
+ params : dict
+ Not used, but obligatory for BetterArgParser.
+
+ Returns
+ -------
+ str
+ arg_val validated in uppercase.
+ """
regex = "^(?:[a-zA-Z0-9]{1,8})|(?:[a-zA-Z0-9]{0,7}[*])$"
validate_parameters_based_on_regex(arg_val, regex)
return arg_val.upper()
def message_id_type(arg_val, params):
+ """Message id type.
+
+ Parameters
+ ----------
+ arg_val : str
+ Argument to validate.
+ params : dict
+ Not used, but obligatory for BetterArgParser.
+
+ Returns
+ -------
+ str
+ arg_val validated in uppercase.
+ """
regex = "^(?:[a-zA-Z0-9]{1,})|(?:[a-zA-Z0-9]{0,}[*])$"
validate_parameters_based_on_regex(arg_val, regex)
return arg_val.upper()
def job_name_type(arg_val, params):
+ """Job name type.
+
+ Parameters
+ ----------
+ arg_val : str
+ Argument to validate.
+ params : dict
+ Not used, but obligatory for BetterArgParser.
+
+ Returns
+ -------
+ str
+ arg_val validated in uppercase.
+ """
regex = "^(?:[a-zA-Z0-9]{1,8})|(?:[a-zA-Z0-9]{0,7}[*])$"
validate_parameters_based_on_regex(arg_val, regex)
return arg_val.upper()
def message_filter_type(arg_val, params):
+ """Message filter type.
+
+ Parameters
+ ----------
+ arg_val : str
+ Argument to validate.
+ params : dict
+ Not used, but obligatory for BetterArgParser.
+
+ Returns
+ -------
+ str
+ regex of the given argument.
+
+ Raises
+ ------
+ ValidationError
+ An error occurred while validating the input parameters.
+ """
try:
filter_text = arg_val.get("filter")
use_regex = arg_val.get("use_regex")
@@ -363,6 +450,25 @@ def message_filter_type(arg_val, params):
def validate_parameters_based_on_regex(value, regex):
+ """Validate parameters based on regex.
+
+ Parameters
+ ----------
+ value : str
+ Argument to compare to regex pattern.
+ regex : str
+ Regex to get pattern from.
+
+ Returns
+ -------
+ str
+ The value given.
+
+ Raises
+ ------
+ ValidationError
+ An error occurred while validating the input parameters.
+ """
pattern = re.compile(regex)
if pattern.fullmatch(value):
pass
@@ -372,7 +478,20 @@ def validate_parameters_based_on_regex(value, regex):
def find_required_request(merged_list, params):
- """Find the request given the options provided."""
+ """Find the request given the options provided.
+
+ Parameters
+ ----------
+ merged_list : list
+ Merged list to search.
+ params : dict
+ Module parameters used to filter the list.
+
+ Returns
+ -------
+ list
+ Filtered list.
+ """
requests = filter_requests(merged_list, params)
return requests
@@ -380,9 +499,24 @@ def find_required_request(merged_list, params):
def create_merge_list(message_a, message_b, message_filter):
"""Merge the return lists that execute both 'd r,a,s' and 'd r,a,jn'.
For example, if we have:
- 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO'OR 'CANCEL'"
+ 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO' OR 'CANCEL'"
'd r,a,jn' response like:"742 R FVFNT29H &742 ARC0055A REPLY 'GO' OR 'CANCEL'"
- the results will be merged so that a full list of information returned on condition"""
+ the results will be merged so that a full list of information is returned.
+
+ Parameters
+ ----------
+ message_a : str
+ Result coming from command 'd r,a,s'.
+ message_b : str
+ Result coming from command 'd r,a,jn'.
+ message_filter : str
+ Message filter.
+
+ Returns
+ -------
+ list
+ Merged results of message_a and message_b.
+ """
list_a = parse_result_a(message_a, message_filter)
list_b = parse_result_b(message_b, message_filter)
merged_list = merge_list(list_a, list_b)
@@ -390,40 +524,87 @@ def create_merge_list(message_a, message_b, message_filter):
def filter_requests(merged_list, params):
- """filter the request given the params provided."""
+ """Filter the request given the params provided.
+
+ Parameters
+ ----------
+ merged_list : list
+ Merged list to filter.
+ params : dict
+ Module parameters used to filter the list.
+
+ Returns
+ -------
+ list
+ Filtered list.
+ """
system = params.get("system")
message_id = params.get("message_id")
job_name = params.get("job_name")
newlist = merged_list
-
if system:
newlist = handle_conditions(newlist, "system", system)
if job_name:
newlist = handle_conditions(newlist, "job_name", job_name)
if message_id:
newlist = handle_conditions(newlist, "message_id", message_id)
-
return newlist
-def handle_conditions(list, condition_type, value):
+def handle_conditions(merged_list, condition_type, value):
+ """Handle conditions.
+
+ Parameters
+ ----------
+ merged_list : list[dict]
+ List to check.
+ condition_type : str
+ Condition type to check.
+ value : str
+ Value to check for.
+
+ Returns
+ -------
+ list[dict]
+ The filtered list.
+ """
# regex = re.compile(condition_values)
newlist = []
- for dict in list:
- if value.endswith("*"):
- exist = dict.get(condition_type).startswith(value.rstrip("*"))
- else:
- exist = dict.get(condition_type) == value
+ exist = False
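+ # e.g. value "JOB5*" prefix-matches "JOB57578", while "JOB57578" must match exactly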
+ for message in merged_list:
+ if message.get(condition_type) is not None:
+ if value.endswith("*"):
+ exist = message.get(condition_type).startswith(value.rstrip("*"))
+ else:
+ exist = message.get(condition_type) == value
if exist:
- newlist.append(dict)
+ newlist.append(message)
return newlist
-def execute_command(operator_cmd, timeout=1, *args, **kwargs):
-
- # response = opercmd.execute(operator_cmd)
- response = opercmd.execute(operator_cmd, timeout, *args, **kwargs)
+def execute_command(operator_cmd, timeout_s=1, *args, **kwargs):
+ """Execute operator command.
+
+ Parameters
+ ----------
+ operator_cmd : str
+ Operator command.
+ timeout_s : int
+ Timeout to wait for the command execution, measured in seconds.
+ *args : tuple
+ Additional positional arguments for the command.
+ **kwargs : dict
+ Additional keyword arguments for the command.
+
+ Returns
+ -------
+ OperatorQueryResult
+ The result of the command.
+ """
+ # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore:
+ timeout_c = 100 * timeout_s
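+ # e.g. the default timeout_s=1 becomes timeout_c=100 (one second)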
+ response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs)
rc = response.rc
stdout = response.stdout_response
@@ -432,6 +613,20 @@ def execute_command(operator_cmd, timeout=1, *args, **kwargs):
def match_raw_message(msg, message_filter):
+ """Match raw message.
+
+ Parameters
+ ----------
+ msg : str
+ Message to match.
+ message_filter : str
+ Filter for the message.
+
+ Returns
+ -------
+ re.Match or None
+ The match object if the pattern matches msg, otherwise None.
+ """
pattern = re.compile(message_filter, re.DOTALL)
return pattern.match(msg)
@@ -441,7 +636,20 @@ def parse_result_a(result, message_filter):
there are usually two formats:
- line with job_id: 810 R MV2D JOB58389 &810 ARC0055A REPLY 'GO' OR 'CANCEL'
- line without job_id: 574 R MV28 *574 IXG312E OFFLOAD DELAYED FOR..
- also the request contains multiple lines, we need to handle that as well"""
+ the request may also contain multiple lines, which needs to be handled as well.
+
+ Parameters
+ ----------
+ result : str
+ Result coming from command 'd r,a,s'.
+ message_filter : str
+ Message filter.
+
+ Returns
+ -------
+ list[dict[str,str]]
+ Resulting list.
+ """
dict_temp = {}
list = []
@@ -473,7 +681,20 @@ def parse_result_a(result, message_filter):
def parse_result_b(result, message_filter):
"""Parse the result that comes from command 'd r,a,jn', the main purpose
to use this command is to get the job_name and message id, which is not
- included in 'd r,a,s'"""
+ included in 'd r,a,s'.
+
+ Parameters
+ ----------
+ result : str
+ Result coming from command 'd r,a,jn'.
+ message_filter : str
+ Message filter.
+
+ Returns
+ -------
+ list[dict[str,str]]
+ Resulting list.
+ """
dict_temp = {}
list = []
@@ -505,6 +726,20 @@ def parse_result_b(result, message_filter):
def merge_list(list_a, list_b):
+ """Merge lists.
+
+ Parameters
+ ----------
+ list_a : list
+ First list to be merged.
+ list_b : list
+ Second list to be merged.
+
+ Returns
+ -------
+ list
+ Merge of list_a and list_b.
+ """
merged_list = []
for dict_a in list_a:
for dict_b in list_b:
@@ -521,6 +756,18 @@ class Error(Exception):
class ValidationError(Error):
def __init__(self, message):
+ """An error occurred during validate the input parameters.
+
+ Parameters
+ ----------
+ message : str
+ Message of the error that occurred.
+
+ Attributes
+ ----------
+ msg : str
+ Human readable string describing the exception.
+ """
self.msg = (
'An error occurred during validate the input parameters: "{0}"'.format(
message
@@ -537,12 +784,27 @@ def __init__(
):
"""Response object class to manage the result from executing a command
to query for actionable messages. Class will also generate a message
- by concatenating stdout and stderr
-
- Arguments:
- rc {str} -- The return code
- stdout {str} -- The standard out of the command run
- stderr {str} -- The standard error of the command run
+ by concatenating stdout and stderr.
+
+ Parameters
+ ----------
+ rc : str
+ The return code.
+ stdout : str
+ The standard out of the command run.
+ stderr : str
+ The standard error of the command run.
+
+ Attributes
+ ----------
+ rc : str
+ The return code.
+ stdout : str
+ The standard out of the command run.
+ stderr : str
+ The standard error of the command run.
+ message : str
+ The concatenation of stdout and stderr from the command run.
"""
self.rc = rc
self.stdout = stdout
diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx
index beca54c3b..78e09f6b5 100644
--- a/plugins/modules/zos_ping.rexx
+++ b/plugins/modules/zos_ping.rexx
@@ -1,7 +1,7 @@
/* rexx __ANSIBLE_ENCODE_EBCDIC__ */
/* WANT_JSON */
-/* Copyright (c) IBM Corporation 2019, 2020, 2023 */
+/* Copyright (c) IBM Corporation 2019, 2023 */
/* Licensed under the Apache License, Version 2.0 (the "License"); */
/* you may not use this file except in compliance with the License. */
diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py
index 0677d187d..580773219 100644
--- a/plugins/modules/zos_script.py
+++ b/plugins/modules/zos_script.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2023
+# Copyright (c) IBM Corporation 2023, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -229,6 +229,17 @@
def run_module():
+ """Initialize module.
+
+ Raises
+ ------
+ fail_json
+ Parameter verification failed.
+ fail_json
+ The given chdir does not exist on the system.
+ fail_json
+ The script terminated with an error.
+ """
module = AnsibleModule(
argument_spec=dict(
chdir=dict(type='str', required=False),
diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py
index 17e190fb2..2ac4a9d32 100644
--- a/plugins/modules/zos_tso_command.py
+++ b/plugins/modules/zos_tso_command.py
@@ -135,6 +135,23 @@
def run_tso_command(commands, module, max_rc):
+ """Run tso command.
+
+ Parameters
+ ----------
+ commands : str
+ Commands to run.
+ module : AnsibleModule
+ Ansible module to run the command with.
+ max_rc : int
+ Max return code.
+
+ Returns
+ -------
+ list[dict]
+ The command result details.
+ """
script = """/* REXX */
PARSE ARG cmd
address tso
@@ -152,6 +169,24 @@ def run_tso_command(commands, module, max_rc):
def copy_rexx_and_run_commands(script, commands, module, max_rc):
+ """Copy rexx into a temporary file and run commands.
+
+ Parameters
+ ----------
+ script : str
+ Script to run the command.
+ commands : str
+ Commands to run.
+ module : AnsibleModule
+ Ansible module to run the command with.
+ max_rc : int
+ Max return code.
+
+ Returns
+ -------
+ list[dict]
+ The command result details.
+ """
command_detail_json = []
delete_on_close = True
tmp_file = NamedTemporaryFile(delete=delete_on_close)
@@ -180,6 +215,25 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc):
def list_or_str_type(contents, dependencies):
+ """Checks if a variable contains a string or a list of strings and returns it as a list of strings.
+
+ Parameters
+ ----------
+ contents : str | list[str]
+ String or list of strings.
+ dependencies
+ Unused, but required by BetterArgParser.
+
+ Returns
+ -------
+ list[str]
+ The parameter given as a list of strings.
+
+ Raises
+ ------
+ ValueError
+ Invalid argument type. Expected "string or list of strings".
+ """
failed = False
if isinstance(contents, list):
for item in contents:
@@ -200,6 +254,17 @@ def list_or_str_type(contents, dependencies):
def run_module():
+ """Initialize module.
+
+ Raises
+ ------
+ fail_json
+ A ValueError was raised by BetterArgParser.
+ fail_json
+ Some command(s) failed.
+ fail_json
+ An unexpected error occurred.
+ """
module_args = dict(
commands=dict(type="raw", required=True, aliases=["command"]),
max_rc=dict(type="int", required=False, default=0),
diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py
index e6603cddc..cb587dc0e 100644
--- a/plugins/modules/zos_unarchive.py
+++ b/plugins/modules/zos_unarchive.py
@@ -181,11 +181,11 @@
- Organization of the destination
type: str
required: false
- default: SEQ
+ default: seq
choices:
- - SEQ
- - PDS
- - PDSE
+ - seq
+ - pds
+ - pdse
space_primary:
description:
- If the destination I(dest) data set does not exist , this sets the
@@ -204,28 +204,28 @@
description:
- If the destination data set does not exist, this sets the unit of
measurement to use when defining primary and secondary space.
- - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK).
+ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk).
type: str
choices:
- - K
- - M
- - G
- - CYL
- - TRK
+ - k
+ - m
+ - g
+ - cyl
+ - trk
required: false
record_format:
description:
- If the destination data set does not exist, this sets the format of
the
- data set. (e.g C(FB))
- - Choices are case-insensitive.
+ data set. (e.g. C(fb))
+ - Choices are case-sensitive.
required: false
choices:
- - FB
- - VB
- - FBA
- - VBA
- - U
+ - fb
+ - vb
+ - fba
+ - vba
+ - u
type: str
record_length:
description:
@@ -249,15 +249,15 @@
key_offset:
description:
- The key offset to use when creating a KSDS data set.
- - I(key_offset) is required when I(type=KSDS).
- - I(key_offset) should only be provided when I(type=KSDS)
+ - I(key_offset) is required when I(type=ksds).
+ - I(key_offset) should only be provided when I(type=ksds)
type: int
required: false
key_length:
description:
- The key length to use when creating a KSDS data set.
- - I(key_length) is required when I(type=KSDS).
- - I(key_length) should only be provided when I(type=KSDS)
+ - I(key_length) is required when I(type=ksds).
+ - I(key_length) should only be provided when I(type=ksds)
type: int
required: false
sms_storage_class:
@@ -356,8 +356,8 @@
format:
name: xmit
format_options:
- use_adrdssu: True
- list: True
+ use_adrdssu: true
+ list: true
'''
RETURN = r'''
@@ -395,14 +395,15 @@
import os
import zipfile
import tarfile
+import traceback
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
+ ZOAUImportError,
)
try:
from zoautil_py import datasets
except Exception:
- Datasets = MissingZOAUImport()
+ datasets = ZOAUImportError(traceback.format_exc())
data_set_regex = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}"
@@ -649,8 +650,8 @@ def _compute_dest_data_set_size(self):
"""
# Get the size from the system
- src_attributes = datasets.listing(self.src)[0]
- # The size returned by listing is in bytes.
+ src_attributes = datasets.list_datasets(self.src)[0]
+ # The size returned by list_datasets is in bytes.
source_size = int(src_attributes.total_space)
if self.format == 'terse':
source_size = int(source_size * 1.5)
@@ -690,17 +691,15 @@ def _create_dest_data_set(
if tmp_hlq:
hlq = tmp_hlq
else:
- rc, hlq, err = self.module.run_command("hlq")
- hlq = hlq.replace('\n', '')
- cmd = "mvstmphelper {0}.RESTORE".format(hlq)
- rc, temp_ds, err = self.module.run_command(cmd)
- arguments.update(name=temp_ds.replace('\n', ''))
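+ # Use the ZOAU datasets API to derive the HLQ and a temporary data set name.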
+ hlq = datasets.get_hlq()
+ temp_ds = datasets.tmp_name(high_level_qualifier=hlq)
+ arguments.update(name=temp_ds)
if record_format is None:
- arguments.update(record_format="FB")
+ arguments.update(record_format="fb")
if record_length is None:
arguments.update(record_length=80)
if type is None:
- arguments.update(type="SEQ")
+ arguments.update(type="seq")
if space_primary is None:
arguments.update(space_primary=self._compute_dest_data_set_size())
arguments.pop("self")
@@ -803,8 +802,8 @@ def extract_src(self):
temp_ds, rc = self._create_dest_data_set(**self.dest_data_set)
rc = self.unpack(self.src, temp_ds)
else:
- temp_ds, rc = self._create_dest_data_set(type="SEQ",
- record_format="U",
+ temp_ds, rc = self._create_dest_data_set(type="seq",
+ record_format="u",
record_length=0,
tmp_hlq=self.tmphlq,
replace=True)
@@ -824,7 +823,7 @@ def _list_content(self, source):
self._get_restored_datasets(out)
def list_archive_content(self):
- temp_ds, rc = self._create_dest_data_set(type="SEQ", record_format="U", record_length=0, tmp_hlq=self.tmphlq, replace=True)
+ temp_ds, rc = self._create_dest_data_set(type="seq", record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True)
self.unpack(self.src, temp_ds)
self._list_content(temp_ds)
datasets.delete(temp_ds)
@@ -1027,9 +1026,9 @@ def run_module():
),
type=dict(
type='str',
- choices=['SEQ', 'PDS', 'PDSE'],
+ choices=['seq', 'pds', 'pdse'],
required=False,
- default='SEQ',
+ default='seq',
),
space_primary=dict(
type='int', required=False),
@@ -1037,12 +1036,12 @@ def run_module():
type='int', required=False),
space_type=dict(
type='str',
- choices=['K', 'M', 'G', 'CYL', 'TRK'],
+ choices=['k', 'm', 'g', 'cyl', 'trk'],
required=False,
),
record_format=dict(
type='str',
- choices=["FB", "VB", "FBA", "VBA", "U"],
+ choices=["fb", "vb", "fba", "vba", "u"],
required=False
),
record_length=dict(type='int', required=False),
@@ -1108,7 +1107,7 @@ def run_module():
required=False,
options=dict(
name=dict(arg_type='str', required=False),
- type=dict(arg_type='str', required=False, default="SEQ"),
+ type=dict(arg_type='str', required=False, default="seq"),
space_primary=dict(arg_type='int', required=False),
space_secondary=dict(
arg_type='int', required=False),
diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py
index 6dbc9f97e..d0a2c55be 100644
--- a/plugins/modules/zos_volume_init.py
+++ b/plugins/modules/zos_volume_init.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2022, 2023
+# Copyright (c) IBM Corporation 2022, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -151,14 +151,14 @@
zos_volume_init:
address: "1234"
volid: "DEMO01"
- sms_managed: no
+ sms_managed: false
- name: Initialize non-SMS managed target volume with all the default options and
override the default high level qualifier (HLQ).
zos_volume_init:
address: 1234
volid: DEMO01
- sms_managed: no
+ sms_managed: false
tmp_hlq: TESTUSR
- name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as
@@ -167,12 +167,12 @@
zos_volume_init:
address: e8d8
vtoc_size: 30
- index: yes
- sms_managed: yes
+ index: true
+ sms_managed: true
volid: ine8d8
verify_volid: ine8d8
- verify_volume_empty: yes
- verify_offline: no
+ verify_volume_empty: true
+ verify_offline: false
- name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03'
using Ansible loops.
@@ -230,7 +230,13 @@
def run_module():
+ """Initialize the module.
+ Raises
+ ------
+ fail_json
+ 'Index' cannot be False for SMS managed volumes.
+ """
module_args = dict(
address=dict(type="str", required=True),
verify_volid=dict(type="str", required=False),
diff --git a/scripts/requirements-2.11.env b/scripts/requirements-2.11.env
index e7defb9fc..3b4a18d0c 100644
--- a/scripts/requirements-2.11.env
+++ b/scripts/requirements-2.11.env
@@ -31,5 +31,5 @@ requirements=(
)
python=(
-"python:3.8"
+"<=:python:3.9"
)
\ No newline at end of file
diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env
index 5052447da..4f6add957 100644
--- a/scripts/requirements-2.12.env
+++ b/scripts/requirements-2.12.env
@@ -28,5 +28,5 @@ requirements=(
)
python=(
-"python:3.8"
+"<=:python:3.10"
)
\ No newline at end of file
diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env
index c08a7c7e9..cfce646d0 100644
--- a/scripts/requirements-2.13.env
+++ b/scripts/requirements-2.13.env
@@ -28,5 +28,5 @@ requirements=(
)
python=(
-"python:3.8"
+"<=:python:3.10"
)
\ No newline at end of file
diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env
index 9d15b3dab..f1c423f8b 100644
--- a/scripts/requirements-2.14.env
+++ b/scripts/requirements-2.14.env
@@ -28,5 +28,5 @@ requirements=(
)
python=(
-"python:3.9"
+"<=:python:3.11"
)
\ No newline at end of file
diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env
index 5f8b36260..3d94e55af 100644
--- a/scripts/requirements-2.15.env
+++ b/scripts/requirements-2.15.env
@@ -28,5 +28,5 @@ requirements=(
)
python=(
-"python:3.9"
+"<=:python:3.11"
)
diff --git a/scripts/requirements-2.16.env b/scripts/requirements-2.16.env
new file mode 100644
index 000000000..2d0ed42a1
--- /dev/null
+++ b/scripts/requirements-2.16.env
@@ -0,0 +1,32 @@
+#!/bin/sh
+# ==============================================================================
+# Copyright (c) IBM Corporation 2024
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+# ==============================================================================
+# File name must adhere to reqs-.sh. Concurrent patches could
+# come, and to support that you would want to not use unique files for each
+# ansible version but instead have this file provide the meta-data so that the
+# sourcing scripts know to create additional ansible venvs and maybe even
+# clean them up if they are removed from this file.
+# e.g. venvs = [requirements, requirements2, requirements3]
+# ==============================================================================
+
+requirements=(
+"ansible-core:2.16.3"
+"pylint"
+"rstcheck"
+)
+
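+# Each entry is "<operator>:<name>:<version>"; scripts/venv.sh splits on ":" and
+# maps the operator (e.g. "<=" to "-le") when selecting a discovered python.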
+python=(
+"<=:python:3.12"
+)
diff --git a/scripts/requirements-2.9.env b/scripts/requirements-2.9.env
index 2d7d9e11b..b962483f9 100644
--- a/scripts/requirements-2.9.env
+++ b/scripts/requirements-2.9.env
@@ -30,6 +30,6 @@ requirements=(
)
python=(
-"python:3.8"
+"==:python:3.8"
)
diff --git a/scripts/venv.sh b/scripts/venv.sh
index 51426a055..56756d16e 100755
--- a/scripts/venv.sh
+++ b/scripts/venv.sh
@@ -27,6 +27,13 @@ VENV_HOME_MANAGED=${PWD%/*}/venv
# Array where each entry is: ":::"
HOSTS_ALL=""
+OPER_EQ="=="
+OPER_NE="!="
+OPER_LT="<"
+OPER_LE="<="
+OPER_GT=">"
+OPER_GE=">="
+
# hosts_env="hosts.env"
# if [ -f "$hosts_env" ]; then
@@ -128,9 +135,9 @@ echo_requirements(){
py_req="0"
for ver in "${python[@]}" ; do
- key=${ver%%:*}
- value=${ver#*:}
- py_req="${value}"
+ py_op=`echo "${ver}" | cut -d ":" -f 1`
+ py_name=`echo "${ver}" | cut -d ":" -f 2`
+ py_req=`echo "${ver}" | cut -d ":" -f 3`
done
echo "${py_req}"
done
@@ -222,13 +229,29 @@ write_requirements(){
py_req="0"
for ver in "${python[@]}" ; do
- key=${ver%%:*}
- value=${ver#*:}
- py_req="${value}"
+ py_op=`echo "${ver}" | cut -d ":" -f 1`
+ py_name=`echo "${ver}" | cut -d ":" -f 2`
+ py_req=`echo "${ver}" | cut -d ":" -f 3`
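+ # e.g. ver="<=:python:3.12" yields py_op="<=", py_name="python", py_req="3.12"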
done
+ if [ "$OPER_EQ" == "$py_op" ];then
+ py_op="-eq"
+ elif [ "$OPER_NE" == "$py_op" ];then
+ py_op="-ne"
+ elif [ "$OPER_LT" == "$py_op" ];then
+ py_op="-lt"
+ elif [ "$OPER_LE" == "$py_op" ];then
+ py_op="-le"
+ elif [ "$OPER_GT" == "$py_op" ];then
+ py_op="-gt"
+ elif [ "$OPER_GE" == "$py_op" ];then
+ py_op="-ge"
+ fi
+
+ discover_python $py_op $py_req
+
- # Is the discoverd python >= what the requirements.txt requires?
+ # Does the discovered python satisfy the operator and version the requirements file specifies?
- if [ $(normalize_version $VERSION_PYTHON) -ge $(normalize_version $py_req) ]; then
+ if [ $(normalize_version $VERSION_PYTHON) "$py_op" $(normalize_version $py_req) ]; then
echo "${REQ}${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt
cp mounts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/
#cp info.env "${VENV_HOME_MANAGED}"/"${venv_name}"/
@@ -245,6 +268,16 @@ write_requirements(){
chmod 700 "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env
#echo "${option_pass}" | openssl bf -d -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin
echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin
+ else
+ # create a stub so the user can later choose to rename and configure it
+ touch "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "# This configuration file is used by the tool to avoid exporting enviroment variables">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "# To use this, update all the variables with a value and rename the file to 'info.env'.">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "USER=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "PASS=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "HOST_SUFFIX=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "SSH_KEY_PIPELINE=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme
+ echo "No password was provided, a temporary 'info.env.changeme' file has been created for your convenience."
fi
else
echo "Not able to create managed venv path: ${VENV_HOME_MANAGED}/${venv_name} , min python required is ${py_req}, found version $VERSION_PYTHON"
@@ -282,36 +315,50 @@ find_in_path() {
result=""
OTHER_PYTHON_PATHS="/Library/Frameworks/Python.framework/Versions/Current/bin:/opt/homebrew/bin:"
PATH="${OTHER_PYTHON_PATHS}${PATH}"
+ OLDIFS=$IFS
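+ # split PATH on ':' for the loop below, then restore IFS afterwards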
IFS=:
for x in $PATH; do
if [ -x "$x/$1" ]; then
result=${result}" $x/$1"
fi
done
+ IFS=$OLDIFS
echo $result
}
-
# Find the most recent python in a users path
discover_python(){
- # Don't use which, it only will find first in path within script
+ operator=$1
+ required_python=$2
+ if [ ! "$operator" ]; then
+ operator="-ge"
+ fi
+
+ if [ "$required_python" ]; then
+ VERSION_PYTHON=$required_python
+ fi
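+ # e.g. `discover_python "-le" "3.12"` picks the newest python found whose
+ # version satisfies <= 3.12; without arguments the comparison defaults to -ge.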
+
+ # Don't use which; it will only find the first match in PATH within the script
# for python_found in `which python3 | cut -d" " -f3`; do
- pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11") # "python3.12" "python3.13" "python3.14")
- #pys=("python3.8" "python3.9")
+ pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8")
+ rc=1
for py in "${pys[@]}"; do
for python_found in `find_in_path $py`; do
ver=`${python_found} --version | cut -d" " -f2`
+ rc=$?
+ ver=`echo $ver |cut -d"." -f1,2`
ver_path="$python_found"
echo "Found $ver_path"
done
-
- if [ $(normalize_version $ver) -ge $(normalize_version $VERSION_PYTHON) ]; then
- VERSION_PYTHON="$ver"
- VERSION_PYTHON_PATH="$ver_path"
+ if [ $rc -eq 0 ];then
+ if [ $(normalize_version $ver) "$operator" $(normalize_version $VERSION_PYTHON) ]; then
+ VERSION_PYTHON="$ver"
+ VERSION_PYTHON_PATH="$ver_path"
+ break
+ fi
fi
-
done
echo ${DIVIDER}
@@ -406,7 +453,7 @@ set_hosts_to_array(){
else # check if the env varas instead have been exported
if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then
echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported."
- echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env."
+ echo "Export and set vars: 'USER', 'PASS','HOST_SUFFIX' and optionally 'SSH_KEY_PIPELINE', or place them in a file named info.env."
exit 1
fi
fi
@@ -566,7 +613,7 @@ case "$1" in
discover_python
;;
--vsetup)
- discover_python
+ #discover_python
make_venv_dirs
#echo_requirements
write_requirements $3
diff --git a/tests/conftest.py b/tests/conftest.py
index c8513ad37..7fea5ac0d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -14,7 +14,7 @@
__metaclass__ = type
import pytest
from ibm_zos_core.tests.helpers.ztest import ZTestHelper
-from ibm_zos_core.tests.helpers.volumes import get_volumes
+from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds
import sys
from mock import MagicMock
import importlib
@@ -93,6 +93,18 @@ def volumes_on_systems(ansible_zos_module, request):
list_Volumes = get_volumes(ansible_zos_module, path)
yield list_Volumes
+
+@pytest.fixture(scope="session")
+def volumes_with_vvds(ansible_zos_module, request):
+ """ Return a list of volumes that have a VVDS. If no volume has a VVDS
+ then it will try to create one for each volume found and return volumes only
+ if a VVDS was successfully created for it."""
+ path = request.config.getoption("--zinventory")
+ volumes = get_volumes(ansible_zos_module, path)
+ volumes_with_vvds = get_volumes_with_vvds(ansible_zos_module, volumes)
+ yield volumes_with_vvds
+
+
# * We no longer edit sys.modules directly to add zoautil_py mock
# * because automatic teardown is not performed, leading to mock pollution
# * across test files.
diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py
index 66b1919a6..7c19ea31a 100644
--- a/tests/functional/modules/test_zos_apf_func.py
+++ b/tests/functional/modules/test_zos_apf_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2022
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -16,9 +16,6 @@
from ibm_zos_core.tests.helpers.volumes import Volume_Handler
from shellescape import quote
from pprint import pprint
-import os
-import sys
-import pytest
__metaclass__ = type
@@ -53,103 +50,116 @@ def clean_test_env(hosts, test_info):
hosts.all.shell(cmd=cmdStr)
-def test_add_del(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", state="present", force_dynamic=True)
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+def test_add_del(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", state="present", force_dynamic=True)
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 0
+ test_info['state'] = 'absent'
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- test_info['state'] = 'absent'
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- clean_test_env(hosts, test_info)
-
-
-def test_add_del_with_tmp_hlq_option(ansible_zos_module):
- hosts = ansible_zos_module
- tmphlq = "TMPHLQ"
- test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True))
- test_info['tmp_hlq'] = tmphlq
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 0
+ finally:
+ clean_test_env(hosts, test_info)
+
+
+def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ tmphlq = "TMPHLQ"
+ test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True))
+ test_info['tmp_hlq'] = tmphlq
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 0
+ assert result.get("backup_name")[:6] == tmphlq
+ test_info['state'] = 'absent'
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- assert result.get("backup_name")[:6] == tmphlq
- test_info['state'] = 'absent'
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- clean_test_env(hosts, test_info)
-
-
-def test_add_del_volume(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", volume="", state="present", force_dynamic=True)
- ds = get_tmp_ds_name(1,1)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 0
+ finally:
+ clean_test_env(hosts, test_info)
+
+
+def test_add_del_volume(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", volume="", state="present", force_dynamic=True)
+ ds = get_tmp_ds_name(1,1)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 0
+ test_info['state'] = 'absent'
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- test_info['state'] = 'absent'
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- clean_test_env(hosts, test_info)
+ assert result.get("rc") == 0
+ finally:
+ clean_test_env(hosts, test_info)
"""
+This test case was removed 3 years ago in the following PR: https://github.com/ansible-collections/ibm_zos_core/pull/197
def test_add_del_persist(ansible_zos_module):
hosts = ansible_zos_module
test_info = TEST_INFO['test_add_del_persist']
@@ -176,102 +186,113 @@ def test_add_del_persist(ansible_zos_module):
"""
-def test_add_del_volume_persist(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
- ds = get_tmp_ds_name(1,1)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
+ ds = get_tmp_ds_name(1,1)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ results = hosts.all.zos_apf(**test_info)
+ for result in results.contacted.values():
+ assert result.get("rc") == 0
+ add_exptd = add_expected.format(test_info['library'], test_info['volume'])
+ add_exptd = add_exptd.replace(" ", "")
+ cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
results = hosts.all.shell(cmd=cmdStr)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
+ actual = result.get("stdout")
+ actual = actual.replace(" ", "")
+ assert actual == add_exptd
+ test_info['state'] = 'absent'
+ results = hosts.all.zos_apf(**test_info)
+ for result in results.contacted.values():
+ assert result.get("rc") == 0
+ del_exptd = del_expected.replace(" ", "")
+ cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
results = hosts.all.shell(cmd=cmdStr)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
+ actual = result.get("stdout")
+ actual = actual.replace(" ", "")
+ assert actual == del_exptd
+ finally:
+ clean_test_env(hosts, test_info)
+
+"""
+keyword: ENABLE-FOR-1-3
+Test was commented out because of a failure in ZOAU 1.2.x that should be fixed in 1.3.x, so
+whoever works on issue https://github.com/ansible-collections/ibm_zos_core/issues/726
+should uncomment this test as part of the validation process.
+"""
+def test_batch_add_del(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(
+ batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")],
+ persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True
+ )
+ for item in test_info['batch']:
+ ds = get_tmp_ds_name(1,1)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ item['library'] = ds
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ item['volume'] = vol
+ prstds = get_tmp_ds_name(5,5)
cmdStr = "dtouch -tseq {0}".format(prstds)
hosts.all.shell(cmd=cmdStr)
test_info['persistent']['data_set_name'] = prstds
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- add_exptd = add_expected.format(test_info['library'], test_info['volume'])
- add_exptd = add_exptd.replace(" ", "")
- cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- actual = result.get("stdout")
- actual = actual.replace(" ", "")
- assert actual == add_exptd
- test_info['state'] = 'absent'
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- del_exptd = del_expected.replace(" ", "")
- cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- actual = result.get("stdout")
- actual = actual.replace(" ", "")
- assert actual == del_exptd
- clean_test_env(hosts, test_info)
-
-
-#def test_batch_add_del(ansible_zos_module):
-# hosts = ansible_zos_module
-# test_info = dict(
-# batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")],
-# persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True
-# )
-# for item in test_info['batch']:
-# ds = get_tmp_ds_name(1,1)
-# hosts.all.shell(cmd="dtouch {0}".format(ds))
-# item['library'] = ds
-# cmdStr = "dls -l " + ds + " | awk '{print $5}' "
-# results = hosts.all.shell(cmd=cmdStr)
-# for result in results.contacted.values():
-# vol = result.get("stdout")
-# item['volume'] = vol
-# prstds = get_tmp_ds_name(5,5)
-# cmdStr = "dtouch {0}".format(prstds)
-# hosts.all.shell(cmd=cmdStr)
-# test_info['persistent']['data_set_name'] = prstds
-# hosts.all.shell(cmd="echo \"{0}\" > {1}".format("Hello World, Here's Jhonny", prstds))
-# results = hosts.all.zos_apf(**test_info)
-# pprint(vars(results))
-# for result in results.contacted.values():
-# assert result.get("rc") == 0
-# add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'],
-# test_info['batch'][1]['library'], test_info['batch'][1]['volume'],
-# test_info['batch'][2]['library'], test_info['batch'][2]['volume'])
-# add_exptd = add_exptd.replace(" ", "")
-# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
-# results = hosts.all.shell(cmd=cmdStr)
-# for result in results.contacted.values():
-# actual = result.get("stdout")
-# actual = actual.replace(" ", "")
-# assert actual == add_exptd
-# test_info['state'] = 'absent'
-# results = hosts.all.zos_apf(**test_info)
-# pprint(vars(results))
-# for result in results.contacted.values():
-# assert result.get("rc") == 0
-# del_exptd = del_expected.replace(" ", "")
-# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
-# results = hosts.all.shell(cmd=cmdStr)
-# for result in results.contacted.values():
-# actual = result.get("stdout")
-# actual = actual.replace(" ", "")
-# assert actual == del_exptd
-# for item in test_info['batch']:
-# clean_test_env(hosts, item)
-# cmdStr = "drm {0}".format(test_info['persistent']['data_set_name'])
-# hosts.all.shell(cmd=cmdStr)
+ results = hosts.all.zos_apf(**test_info)
+ pprint(vars(results))
+ for result in results.contacted.values():
+ assert result.get("rc") == 0
+ add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'],
+ test_info['batch'][1]['library'], test_info['batch'][1]['volume'],
+ test_info['batch'][2]['library'], test_info['batch'][2]['volume'])
+ add_exptd = add_exptd.replace(" ", "")
+ cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ actual = result.get("stdout")
+ actual = actual.replace(" ", "")
+ assert actual == add_exptd
+ test_info['state'] = 'absent'
+ results = hosts.all.zos_apf(**test_info)
+ pprint(vars(results))
+ for result in results.contacted.values():
+ assert result.get("rc") == 0
+ del_exptd = del_expected.replace(" ", "")
+ cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name'])
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ actual = result.get("stdout")
+ actual = actual.replace(" ", "")
+ assert actual == del_exptd
+ finally:
+ for item in test_info['batch']:
+ clean_test_env(hosts, item)
+ hosts.all.shell(cmd="drm {0}".format(test_info['persistent']['data_set_name']))
def test_operation_list(ansible_zos_module):
@@ -280,111 +301,124 @@ def test_operation_list(ansible_zos_module):
results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
listJson = result.get("stdout")
+ print(listJson)
import json
data = json.loads(listJson)
- assert data[0]['format'] in ['DYNAMIC', 'STATIC']
+ assert data['format'] in ['DYNAMIC', 'STATIC']
del json
-def test_operation_list_with_filter(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", state="present", force_dynamic=True)
- test_info['state'] = 'present'
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", state="present", force_dynamic=True)
+ test_info['state'] = 'present'
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ hosts.all.zos_apf(**test_info)
+ ti = dict(operation="list", library="")
+ ti['library'] = "ANSIBLE.*"
+ results = hosts.all.zos_apf(**ti)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- hosts.all.zos_apf(**test_info)
- ti = dict(operation="list", library="")
- ti['library'] = "APFTEST.*"
- results = hosts.all.zos_apf(**ti)
- for result in results.contacted.values():
- listFiltered = result.get("stdout")
- assert test_info['library'] in listFiltered
- test_info['state'] = 'absent'
- hosts.all.zos_apf(**test_info)
- clean_test_env(hosts, test_info)
+ listFiltered = result.get("stdout")
+ assert test_info['library'] in listFiltered
+ test_info['state'] = 'absent'
+ hosts.all.zos_apf(**test_info)
+ finally:
+ clean_test_env(hosts, test_info)
#
# Negative tests
#
-def test_add_already_present(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", state="present", force_dynamic=True)
- test_info['state'] = 'present'
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+def test_add_already_present(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", state="present", force_dynamic=True)
+ test_info['state'] = 'present'
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 0
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 0
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0
- assert result.get("rc") == 16 or result.get("rc") == 8
- test_info['state'] = 'absent'
- hosts.all.zos_apf(**test_info)
- clean_test_env(hosts, test_info)
-
-
-def test_del_not_present(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", state="present", force_dynamic=True)
- ds = get_tmp_ds_name(1,1)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0
+ assert result.get("rc") == 16 or result.get("rc") == 8
+ test_info['state'] = 'absent'
+ hosts.all.zos_apf(**test_info)
+ finally:
+ clean_test_env(hosts, test_info)
+
+
+def test_del_not_present(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", state="present", force_dynamic=True)
+ ds = get_tmp_ds_name(1,1)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ test_info['state'] = 'absent'
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- test_info['state'] = 'absent'
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0
- assert result.get("rc") == 16 or result.get("rc") == 8
- clean_test_env(hosts, test_info)
+ # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0
+ assert result.get("rc") == 16 or result.get("rc") == 8
+ finally:
+ clean_test_env(hosts, test_info)
def test_add_not_found(ansible_zos_module):
@@ -397,119 +431,135 @@ def test_add_not_found(ansible_zos_module):
assert result.get("rc") == 16 or result.get("rc") == 8
-def test_add_with_wrong_volume(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", volume="", state="present", force_dynamic=True)
- test_info['state'] = 'present'
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", volume="", state="present", force_dynamic=True)
+ test_info['state'] = 'present'
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ test_info['volume'] = 'T12345'
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
+ # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0
+ assert result.get("rc") == 16 or result.get("rc") == 8
+ finally:
+ clean_test_env(hosts, test_info)
+
+
+def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
+ test_info['state'] = 'present'
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name'])
hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- test_info['volume'] = 'T12345'
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0
- assert result.get("rc") == 16 or result.get("rc") == 8
- clean_test_env(hosts, test_info)
-
-
-def test_persist_invalid_ds_format(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
- test_info['state'] = 'present'
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name'])
- hosts.all.shell(cmd=cmdStr)
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 8
- clean_test_env(hosts, test_info)
-
-
-def test_persist_invalid_marker(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
- test_info['state'] = 'present'
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
- for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- test_info['persistent']['marker'] = "# Invalid marker format"
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("rc") == 4
- clean_test_env(hosts, test_info)
-
-
-def test_persist_invalid_marker_len(ansible_zos_module):
- hosts = ansible_zos_module
- test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
- test_info['state'] = 'present'
- ds = get_tmp_ds_name(3,2)
- hosts.all.shell(cmd="dtouch -tseq {0}".format(ds))
- test_info['library'] = ds
- if test_info.get('volume') is not None:
- cmdStr = "dls -l " + ds + " | awk '{print $5}' "
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 8
+ finally:
+ clean_test_env(hosts, test_info)
+
+
+def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
+ test_info['state'] = 'present'
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ test_info['persistent']['marker'] = "# Invalid marker format"
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- vol = result.get("stdout")
- test_info['volume'] = vol
- if test_info.get('persistent'):
- cmdStr = "mvstmp APFTEST.PRST"
- results = hosts.all.shell(cmd=cmdStr)
+ assert result.get("rc") == 4
+ finally:
+ clean_test_env(hosts, test_info)
+
+
+def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds):
+ try:
+ hosts = ansible_zos_module
+ VolumeHandler = Volume_Handler(volumes_with_vvds)
+ volume = VolumeHandler.get_available_vol()
+ test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True)
+ test_info['state'] = 'present'
+ ds = get_tmp_ds_name(3,2)
+ hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ")
+ test_info['library'] = ds
+ if test_info.get('volume') is not None:
+ cmdStr = "dls -l " + ds + " | awk '{print $5}' "
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ vol = result.get("stdout")
+ test_info['volume'] = vol
+ if test_info.get('persistent'):
+ cmdStr = "mvstmp APFTEST.PRST"
+ results = hosts.all.shell(cmd=cmdStr)
+ for result in results.contacted.values():
+ prstds = result.get("stdout")
+ prstds = prstds[:30]
+ cmdStr = "dtouch -tseq {0}".format(prstds)
+ hosts.all.shell(cmd=cmdStr)
+ test_info['persistent']['data_set_name'] = prstds
+ test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o")
+ results = hosts.all.zos_apf(**test_info)
for result in results.contacted.values():
- prstds = result.get("stdout")
- prstds = prstds[:30]
- cmdStr = "dtouch -tseq {0}".format(prstds)
- hosts.all.shell(cmd=cmdStr)
- test_info['persistent']['data_set_name'] = prstds
- test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o")
- results = hosts.all.zos_apf(**test_info)
- for result in results.contacted.values():
- assert result.get("msg") == 'marker length may not exceed 72 characters'
- clean_test_env(hosts, test_info)
\ No newline at end of file
+ assert result.get("msg") == 'marker length may not exceed 72 characters'
+ finally:
+ clean_test_env(hosts, test_info)
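Taken together, the zos_apf cases above pin down the persistent-marker contract: the marker must carry the literal {mark} token (an invalid format fails with rc 4), the marker may not exceed 72 characters (the module reports 'marker length may not exceed 72 characters'), and a persistent data set that already holds foreign content is rejected with rc 8. A minimal sketch of that pre-check, written only to make the rules concrete; marker_is_valid is a hypothetical helper, not part of the module, and the module's actual validation may differ:

def marker_is_valid(marker: str) -> bool:
    # Mirror the constraints these tests exercise: the {mark} token must be
    # present and the whole marker must fit in 72 characters.
    return "{mark}" in marker and len(marker) <= 72

assert marker_is_valid("/* {mark} BLOCK */")
assert not marker_is_valid("# Invalid marker format")
assert not marker_is_valid("/* {mark} This is an awfully lo%70sng marker */" % ("o"))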
diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py
index 2c4091692..e01994138 100644
--- a/tests/functional/modules/test_zos_archive_func.py
+++ b/tests/functional/modules/test_zos_archive_func.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2023
+# Copyright (c) IBM Corporation 2023, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -40,9 +40,9 @@
int main(int argc, char** argv)
{
char dsname[ strlen(argv[1]) + 4];
- sprintf(dsname, "//'%s'", argv[1]);
+ sprintf(dsname, \\\"//'%s'\\\", argv[1]);
FILE* member;
- member = fopen(dsname, "rb,type=record");
+ member = fopen(dsname, \\\"rb,type=record\\\");
sleep(300);
fclose(member);
return 0;
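The tripled escaping above is tied to a staging change later in this file: the C source is now written to the remote with an echo "…" command through hosts.all.shell instead of zos_copy, so each double quote must survive both the Python string literal and the double-quoted shell argument. A small sketch of that chain, under the assumption that the string is expanded inside a double-quoted echo:

# In the Python literal, \\\" denotes the two characters \" ; a double-quoted
# shell echo then reduces \" to a plain " in the generated .c file.
line = "member = fopen(dsname, \\\"rb,type=record\\\");"
print(line)  # member = fopen(dsname, \"rb,type=record\");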
@@ -336,16 +336,16 @@ def test_uss_archive_remove_targets(ansible_zos_module, format):
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]),
]
)
@pytest.mark.parametrize(
"record_length", [80, 120]
)
@pytest.mark.parametrize(
- "record_format", ["FB", "VB"],
+ "record_format", ["fb", "vb"],
)
def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format):
try:
@@ -366,7 +366,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record
replace=True,
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{src_data_set}({member})",
@@ -375,7 +375,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record
)
# Write some content into src the same size of the record,
# need to reduce 4 from V and VB due to RDW
- if record_format in ["V", "VB"]:
+ if record_format in ["v", "vb"]:
test_line = "a" * (record_length - 4)
else:
test_line = "a" * record_length
@@ -388,7 +388,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record
format_dict = dict(name=format)
if format == "terse":
- format_dict["format_options"] = dict(terse_pack="SPACK")
+ format_dict["format_options"] = dict(terse_pack="spack")
archive_result = hosts.all.zos_archive(
src=src_data_set,
dest=archive_data_set,
@@ -415,16 +415,16 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]),
]
)
@pytest.mark.parametrize(
"record_length", [80, 120]
)
@pytest.mark.parametrize(
- "record_format", ["FB", "VB"],
+ "record_format", ["fb", "vb"],
)
def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format):
try:
@@ -445,7 +445,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data
replace=True,
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{src_data_set}({member})",
@@ -454,7 +454,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data
)
# Write some content into src the same size of the record,
# need to reduce 4 from V and VB due to RDW
- if record_format in ["V", "VB"]:
+ if record_format in ["v", "vb"]:
test_line = "a" * (record_length - 4)
else:
test_line = "a" * record_length
@@ -468,7 +468,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data
format_dict = dict(name=format)
format_dict["format_options"] = dict(use_adrdssu=True)
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
archive_result = hosts.all.zos_archive(
src=src_data_set,
dest=archive_data_set,
@@ -495,9 +495,9 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]),
]
)
def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set):
@@ -514,11 +514,11 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d
name=src_data_set,
type=data_set.get("dstype"),
state="present",
- record_format="FB",
+ record_format="fb",
replace=True,
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{src_data_set}({member})",
@@ -536,7 +536,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d
format_dict = dict(name=format)
if format == "terse":
- format_dict["format_options"] = dict(terse_pack="SPACK")
+ format_dict["format_options"] = dict(terse_pack="spack")
archive_result = hosts.all.zos_archive(
src=src_data_set,
dest=archive_data_set,
@@ -566,9 +566,9 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set):
@@ -582,7 +582,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set):
n=3,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -600,7 +600,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set):
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
src="{0}*".format(src_data_set),
@@ -629,9 +629,9 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set):
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set):
@@ -645,7 +645,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma
n=3,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -663,7 +663,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
exclude = "{0}1".format(src_data_set)
archive_result = hosts.all.zos_archive(
@@ -697,9 +697,9 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set):
@@ -713,7 +713,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d
n=3,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -731,7 +731,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
src="{0}*".format(src_data_set),
@@ -762,9 +762,9 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set):
@@ -778,7 +778,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format,
n=3,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -801,7 +801,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format,
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
src=path_list,
@@ -836,9 +836,9 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format,
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2"]),
]
)
def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set):
@@ -858,7 +858,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_
replace=True,
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{src_data_set}({member})",
@@ -876,15 +876,13 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_
format_dict = dict(name=format)
if format == "terse":
- format_dict["format_options"] = dict(terse_pack="SPACK")
+ format_dict["format_options"] = dict(terse_pack="spack")
# copy/compile c program and copy jcl to hold data set lock for n seconds in background(&)
- hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True)
- hosts.all.zos_copy(
- content=call_c_jcl.format(ds_to_write),
- dest='/tmp/disp_shr/call_c_pgm.jcl',
- force=True
- )
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c'))
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(
+ call_c_jcl.format(ds_to_write),
+ '/tmp/disp_shr/call_c_pgm.jcl'))
hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/")
# submit jcl
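The recurring edit in this file swaps uppercase choice values (SEQ/PDS/PDSE, FB/VB, SPACK, M) for the lowercase forms the collection now accepts. A minimal sketch of the format dict the tests assemble for a terse archive, shown only to illustrate the new casing, not as a complete module call:

# Lowercase option values as the updated tests pass them to zos_archive.
format_dict = dict(name="terse")
format_dict["format_options"] = dict(terse_pack="spack", use_adrdssu=True)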
diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py
index 1b44ec124..a33f13daa 100644
--- a/tests/functional/modules/test_zos_backup_restore.py
+++ b/tests/functional/modules/test_zos_backup_restore.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -26,7 +26,7 @@
DATA_SET_QUALIFIER2 = "{0}.PRIVATE.TESTDS2"
DATA_SET_BACKUP_LOCATION = "MY.BACKUP"
UNIX_BACKUP_LOCATION = "/tmp/mybackup.dzp"
-NEW_HLQ = "NEWHLQ"
+NEW_HLQ = "TMPHLQ"
DATA_SET_RESTORE_LOCATION = DATA_SET_QUALIFIER.format(NEW_HLQ)
DATA_SET_RESTORE_LOCATION2 = DATA_SET_QUALIFIER2.format(NEW_HLQ)
@@ -73,6 +73,10 @@ def delete_data_set(hosts, data_set_name):
def delete_file(hosts, path):
hosts.all.file(path=path, state="absent")
+def delete_remnants(hosts):
+ hosts.all.shell(cmd="drm 'ANSIBLE.*'")
+ hosts.all.shell(cmd="drm 'TEST.*'")
+ hosts.all.shell(cmd="drm 'TMPHLQ.*'")
def get_unused_volume_serial(hosts):
found = False
@@ -87,7 +91,6 @@ def is_volume(hosts, volume):
results = hosts.all.shell(cmd="vtocls ${volume}")
failed = False
for result in results.contacted.values():
- print(result)
if result.get("failed", False) is True:
failed = True
if result.get("rc", 0) > 0:
@@ -213,6 +216,7 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover)
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, backup_name)
+ delete_remnants(hosts)
@pytest.mark.parametrize(
@@ -249,6 +253,7 @@ def test_backup_of_data_set_when_backup_dest_exists(
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, backup_name)
+ delete_remnants(hosts)
@pytest.mark.parametrize(
@@ -269,6 +274,7 @@ def test_backup_and_restore_of_data_set(
):
hosts = ansible_zos_module
data_set_name = get_tmp_ds_name()
+ new_hlq = NEW_HLQ
try:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, backup_name)
@@ -282,34 +288,36 @@ def test_backup_and_restore_of_data_set(
overwrite=overwrite,
recover=recover,
)
+ if not overwrite:
+ new_hlq = "TEST"
assert_module_did_not_fail(results)
assert_data_set_or_file_exists(hosts, backup_name)
results = hosts.all.zos_backup_restore(
operation="restore",
backup_name=backup_name,
- hlq=NEW_HLQ,
+ hlq=new_hlq,
overwrite=overwrite,
)
assert_module_did_not_fail(results)
finally:
delete_data_set_or_file(hosts, data_set_name)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, backup_name)
+ delete_remnants(hosts)
@pytest.mark.parametrize(
"backup_name,space,space_type",
[
- (DATA_SET_BACKUP_LOCATION, 10, "M"),
- (DATA_SET_BACKUP_LOCATION, 10000, "K"),
+ (DATA_SET_BACKUP_LOCATION, 10, "m"),
+ (DATA_SET_BACKUP_LOCATION, 10000, "k"),
(DATA_SET_BACKUP_LOCATION, 10, None),
- (DATA_SET_BACKUP_LOCATION, 2, "CYL"),
- (DATA_SET_BACKUP_LOCATION, 10, "TRK"),
- (UNIX_BACKUP_LOCATION, 10, "M"),
- (UNIX_BACKUP_LOCATION, 10000, "K"),
+ (DATA_SET_BACKUP_LOCATION, 2, "cyl"),
+ (DATA_SET_BACKUP_LOCATION, 10, "trk"),
+ (UNIX_BACKUP_LOCATION, 10, "m"),
+ (UNIX_BACKUP_LOCATION, 10000, "k"),
(UNIX_BACKUP_LOCATION, 10, None),
- (UNIX_BACKUP_LOCATION, 2, "CYL"),
- (UNIX_BACKUP_LOCATION, 10, "TRK"),
+ (UNIX_BACKUP_LOCATION, 2, "cyl"),
+ (UNIX_BACKUP_LOCATION, 10, "trk"),
],
)
def test_backup_and_restore_of_data_set_various_space_measurements(
@@ -348,8 +356,8 @@ def test_backup_and_restore_of_data_set_various_space_measurements(
assert_module_did_not_fail(results)
finally:
delete_data_set_or_file(hosts, data_set_name)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, backup_name)
+ delete_remnants(hosts)
@pytest.mark.parametrize(
@@ -397,8 +405,8 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists(
assert_module_failed(results)
finally:
delete_data_set_or_file(hosts, data_set_name)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, backup_name)
+ delete_remnants(hosts)
def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module):
@@ -428,15 +436,13 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module):
backup_name=DATA_SET_BACKUP_LOCATION,
overwrite=True,
recover=True,
- hlq=NEW_HLQ,
)
assert_module_did_not_fail(results)
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, data_set_name2)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2)
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION)
+ delete_remnants(hosts)
def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module):
@@ -473,9 +479,8 @@ def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module):
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, data_set_name2)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2)
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION)
+ delete_remnants(hosts)
def test_backup_and_restore_exclude_from_pattern(ansible_zos_module):
@@ -485,7 +490,6 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module):
try:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, data_set_name2)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2)
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION)
create_sequential_data_set_with_contents(
@@ -514,9 +518,9 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module):
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, data_set_name2)
- delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2)
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION)
+ delete_remnants(hosts)
@pytest.mark.parametrize(
@@ -545,7 +549,7 @@ def test_restore_of_data_set_when_backup_does_not_exist(
finally:
delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, backup_name)
-
+ delete_remnants(hosts)
@pytest.mark.parametrize(
"backup_name",
@@ -574,7 +578,7 @@ def test_backup_of_data_set_when_data_set_does_not_exist(
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, backup_name)
-
+ delete_remnants(hosts)
def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module):
hosts = ansible_zos_module
@@ -597,6 +601,7 @@ def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module):
finally:
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION)
+ delete_remnants(hosts)
def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module):
@@ -629,6 +634,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module):
delete_data_set_or_file(hosts, data_set_name)
delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION)
delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION)
+ delete_remnants(hosts)
# def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module):
@@ -688,7 +694,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module):
# backup_name=DATA_SET_BACKUP_LOCATION,
# overwrite=True,
# space=500,
-# space_type="M",
+# space_type="m",
# )
# assert_module_did_not_fail(results)
# assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION)
@@ -701,7 +707,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module):
# full_volume=True,
# sms_storage_class="DB2SMS10",
# space=500,
-# space_type="M",
+# space_type="m",
# )
# assert_module_did_not_fail(results)
# assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME)
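Across this file the per-test cleanup of DATA_SET_RESTORE_LOCATION is replaced by one sweep: each finally block deletes its own artifacts and then calls the new delete_remnants helper, which drops anything left under the ANSIBLE, TEST, and TMPHLQ qualifiers. A condensed sketch of that teardown shape, using the helpers defined earlier in this file; run_backup_restore_case is a hypothetical wrapper, not a test in the suite:

def run_backup_restore_case(hosts, data_set_name, backup_name):
    try:
        pass  # back up and restore as the individual test requires
    finally:
        # Delete the artifacts this test created, then sweep leftovers.
        delete_data_set_or_file(hosts, data_set_name)
        delete_data_set_or_file(hosts, backup_name)
        delete_remnants(hosts)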
diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py
index 9c3df3dd8..3bd132bcc 100644
--- a/tests/functional/modules/test_zos_blockinfile_func.py
+++ b/tests/functional/modules/test_zos_blockinfile_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -30,9 +30,9 @@
int main(int argc, char** argv)
{
char dsname[ strlen(argv[1]) + 4];
- sprintf(dsname, "//'%s'", argv[1]);
+ sprintf(dsname, \\\"//'%s'\\\", argv[1]);
FILE* member;
- member = fopen(dsname, "rb,type=record");
+ member = fopen(dsname, \\\"rb,type=record\\\");
sleep(300);
fclose(member);
return 0;
@@ -428,10 +428,10 @@
ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8']
# supported data set types
-DS_TYPE = ['SEQ', 'PDS', 'PDSE']
+DS_TYPE = ['seq', 'pds', 'pdse']
# not supported data set types
-NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS']
+NS_DS_TYPE = ['esds', 'rrds', 'lds']
USS_BACKUP_FILE = "/tmp/backup.tmp"
BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"]
@@ -450,7 +450,7 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT)
hosts = ansible_zos_module
hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE))
hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE)
- if DS_TYPE in ["PDS", "PDSE"]:
+ if DS_TYPE in ["pds", "pdse"]:
DS_FULL_NAME = DS_NAME + "(MEM)"
hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member")
cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME)
@@ -481,6 +481,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module):
params["path"] = full_path
results = hosts.all.zos_blockinfile(**params)
for result in results.contacted.values():
+ print(result)
assert result.get("changed") == 1
results = hosts.all.shell(cmd="cat {0}".format(params["path"]))
for result in results.contacted.values():
@@ -862,11 +863,12 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module):
finally:
remove_uss_environment(ansible_zos_module)
-
+# Test case based on a bug in dataset.blockinfile
+# GH Issue #1258
@pytest.mark.uss
def test_uss_block_insert_with_doublequotes(ansible_zos_module):
hosts = ansible_zos_module
- params = dict(insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present")
+ params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present")
full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3]
content = TEST_CONTENT_DOUBLEQUOTES
try:
@@ -874,6 +876,7 @@ def test_uss_block_insert_with_doublequotes(ansible_zos_module):
params["path"] = full_path
results = hosts.all.zos_blockinfile(**params)
for result in results.contacted.values():
+ print(result)
assert result.get("changed") == 1
results = hosts.all.shell(cmd="cat {0}".format(params["path"]))
for result in results.contacted.values():
@@ -1136,7 +1139,7 @@ def test_ds_block_absent(ansible_zos_module, dstype):
def test_ds_tmp_hlq_option(ansible_zos_module):
# This TMPHLQ only works with sequential datasets
hosts = ansible_zos_module
- ds_type = "SEQ"
+ ds_type = "seq"
params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ")
kwargs = dict(backup_name=r"TMPHLQ\..")
content = TEST_CONTENT
@@ -1226,7 +1229,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype):
MEMBER_1, MEMBER_2 = "MEM1", "MEM2"
TEMP_FILE = "/tmp/{0}".format(MEMBER_2)
content = TEST_CONTENT
- if ds_type == "SEQ":
+ if ds_type == "seq":
params["path"] = default_data_set_name+".{0}".format(MEMBER_2)
else:
params["path"] = default_data_set_name+"({0})".format(MEMBER_2)
@@ -1243,7 +1246,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype):
]
)
# write member to verify cases
- if ds_type in ["PDS", "PDSE"]:
+ if ds_type in ["pds", "pdse"]:
cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"])
else:
cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"])
@@ -1252,12 +1255,11 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype):
for result in results.contacted.values():
assert int(result.get("stdout")) != 0
# copy/compile c program and copy jcl to hold data set lock for n seconds in background(&)
- hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True)
- hosts.all.zos_copy(
- content=call_c_jcl.format(default_data_set_name, MEMBER_1),
- dest='/tmp/disp_shr/call_c_pgm.jcl',
- force=True
- )
+ hosts.all.file(path="/tmp/disp_shr/", state="directory")
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c'))
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(
+ call_c_jcl.format(default_data_set_name, MEMBER_1),
+ '/tmp/disp_shr/call_c_pgm.jcl'))
hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/")
hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/")
time.sleep(5)
@@ -1304,6 +1306,71 @@ def test_uss_encoding(ansible_zos_module, encoding):
remove_uss_environment(ansible_zos_module)
+@pytest.mark.ds
+@pytest.mark.parametrize("dstype", DS_TYPE)
+@pytest.mark.parametrize("encoding", ["IBM-1047"])
+def test_ds_encoding(ansible_zos_module, encoding, dstype):
+ hosts = ansible_zos_module
+ ds_type = dstype
+ insert_data = "Insert this string"
+ params = dict(insertafter="SIMPLE", block=insert_data, state="present")
+ params["encoding"] = encoding
+ ds_name = get_tmp_ds_name()
+ temp_file = "/tmp/" + ds_name
+ content = "SIMPLE LINE TO VERIFY"
+ try:
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file))
+ hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"])
+ hosts.all.zos_data_set(name=ds_name, type=ds_type)
+ if ds_type in ["pds", "pdse"]:
+ ds_full_name = ds_name + "(MEM)"
+ hosts.all.zos_data_set(name=ds_full_name, state="present", type="member")
+ cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name)
+ else:
+ ds_full_name = ds_name
+ cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name)
+ hosts.all.shell(cmd=cmdStr)
+ hosts.all.shell(cmd="rm -rf " + temp_file)
+ params["path"] = ds_full_name
+ results = hosts.all.zos_blockinfile(**params)
+ for result in results.contacted.values():
+ assert result.get("changed") == 1
+ hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047")
+ results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
+ for result in results.contacted.values():
+ assert result.get("stdout") == EXPECTED_ENCODING
+ finally:
+ remove_ds_environment(ansible_zos_module, ds_name)
+
+
+#########################
+# Encoding tests
+#########################
+@pytest.mark.uss
+@pytest.mark.parametrize("encoding", ENCODING)
+def test_uss_encoding(ansible_zos_module, encoding):
+ hosts = ansible_zos_module
+ insert_data = "Insert this string"
+ params = dict(insertafter="SIMPLE", block=insert_data, state="present")
+ params["encoding"] = encoding
+ full_path = TEST_FOLDER_BLOCKINFILE + encoding
+ content = "SIMPLE LINE TO VERIFY"
+ try:
+ hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE))
+ hosts.all.file(path=full_path, state="touch")
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path))
+ hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"])
+ params["path"] = full_path
+ results = hosts.all.zos_blockinfile(**params)
+ for result in results.contacted.values():
+ assert result.get("changed") == 1
+ results = hosts.all.shell(cmd="cat {0}".format(params["path"]))
+ for result in results.contacted.values():
+ assert result.get("stdout") == EXPECTED_ENCODING
+ finally:
+ remove_uss_environment(ansible_zos_module)
+
+
@pytest.mark.ds
@pytest.mark.parametrize("dstype", DS_TYPE)
@pytest.mark.parametrize("encoding", ["IBM-1047"])
@@ -1422,7 +1489,7 @@ def test_not_exist_ds_block_insertafter_regex(ansible_zos_module):
@pytest.mark.ds
def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module):
hosts = ansible_zos_module
- ds_type = 'SEQ'
+ ds_type = 'seq'
params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present")
params["insertafter"] = 'SOME_NON_EXISTING_PATTERN'
ds_name = get_tmp_ds_name()
@@ -1474,8 +1541,9 @@ def test_ds_not_supported(ansible_zos_module, dstype):
hosts.all.zos_data_set(name=ds_name, state="absent")
+# Enhancement #1339
@pytest.mark.ds
-@pytest.mark.parametrize("dstype", ["PDS","PDSE"])
+@pytest.mark.parametrize("dstype", ["pds","pdse"])
def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype):
hosts = ansible_zos_module
ds_type = dstype
@@ -1503,12 +1571,11 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype):
for result in results.contacted.values():
assert int(result.get("stdout")) != 0
# copy/compile c program and copy jcl to hold data set lock for n seconds in background(&)
- hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True)
- hosts.all.zos_copy(
- content=call_c_jcl.format(default_data_set_name, MEMBER_1),
- dest='/tmp/disp_shr/call_c_pgm.jcl',
- force=True
- )
+ hosts.all.file(path="/tmp/disp_shr/", state="directory")
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c'))
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(
+ call_c_jcl.format(default_data_set_name, MEMBER_1),
+ '/tmp/disp_shr/call_c_pgm.jcl'))
hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/")
hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/")
time.sleep(5)
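The encoding tests added in this file all follow one round trip: seed the target in IBM-1047, convert it to the encoding under test with zos_encode, run zos_blockinfile with a matching encoding parameter, convert back, and compare against EXPECTED_ENCODING. A compressed sketch of that flow; it assumes the ansible_zos_module fixture is in scope, and the path is illustrative rather than one the tests use:

hosts = ansible_zos_module          # fixture-provided host wrapper
path = "/tmp/blockinfile_demo"      # illustrative USS path
encoding = "ISO8859-1"              # one of the ENCODING values defined above
hosts.all.zos_encode(src=path, dest=path, from_encoding="IBM-1047", to_encoding=encoding)
hosts.all.zos_blockinfile(path=path, insertafter="SIMPLE", block="Insert this string", state="present", encoding=encoding)
hosts.all.zos_encode(src=path, dest=path, from_encoding=encoding, to_encoding="IBM-1047")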
diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py
index 659461abb..086b7d27e 100644
--- a/tests/functional/modules/test_zos_copy_func.py
+++ b/tests/functional/modules/test_zos_copy_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020 - 2024
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -259,7 +259,7 @@ def populate_partitioned_data_set(hosts, name, ds_type, members=None):
Arguments:
hosts (object) -- Ansible instance(s) that can call modules.
name (str) -- Name of the data set.
- ds_type (str) -- Type of the data set (either PDS or PDSE).
+ ds_type (str) -- Type of the data set (either pds or pdse).
members (list, optional) -- List of member names to create.
"""
if not members:
@@ -281,9 +281,9 @@ def get_listcat_information(hosts, name, ds_type):
Arguments:
hosts (object) -- Ansible instance(s) that can call modules.
name (str) -- Name of the data set.
- ds_type (str) -- Type of data set ("SEQ", "PDS", "PDSE", "KSDS").
+ ds_type (str) -- Type of data set ("seq", "pds", "pdse", "ksds").
"""
- if ds_type.upper() == "KSDS":
+ if ds_type == "ksds":
idcams_input = " LISTCAT ENT('{0}') DATA ALL".format(name)
else:
idcams_input = " LISTCAT ENTRIES('{0}')".format(name)
@@ -310,7 +310,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None,
Arguments:
hosts (object) -- Ansible instance(s) that can call modules.
name (str) -- Name of the VSAM data set.
- type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS)
+ type (str) -- Type of the VSAM (ksds, esds, rrds, lds)
add_data (bool, optional) -- Whether to add records to the VSAM.
key_length (int, optional) -- Key length (only for KSDS data sets).
key_offset (int, optional) -- Key offset (only for KSDS data sets).
@@ -320,7 +320,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None,
type=ds_type,
state="present"
)
- if ds_type == "KSDS":
+ if ds_type == "ksds":
params["key_length"] = key_length
params["key_offset"] = key_offset
@@ -369,7 +369,7 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo
# Submit link JCL.
job_result = hosts.all.zos_job_submit(
src="/tmp/link.jcl",
- location="USS",
+ location="uss",
wait_time_s=60
)
for result in job_result.contacted.values():
@@ -793,6 +793,12 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_
@pytest.mark.uss
@pytest.mark.parametrize("copy_directory", [False, True])
def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory):
+ """
+ This test evaluates the behavior of copying a directory when src ends with
+ '/' versus only the directory name. The expectation is that when only the
+ directory name is provided, the directory itself is also created on the
+ remote; when the directory name ends with '/', only the directory's
+ contents are copied, without creating the directory itself on the remote,
+ as illustrated in the comment below.
+ """
hosts = ansible_zos_module
dest_path = "/tmp/new_dir"
@@ -1884,7 +1890,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module):
name=src,
state="present",
type="seq",
- record_format="FBA",
+ record_format="fba",
record_length=80,
block_size=27920,
replace=True
@@ -1959,13 +1965,13 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module,
@pytest.mark.seq
-@pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"])
+@pytest.mark.parametrize("ds_type", [ "pds", "pdse", "seq"])
def test_copy_dest_lock(ansible_zos_module, ds_type):
hosts = ansible_zos_module
data_set_1 = get_tmp_ds_name()
data_set_2 = get_tmp_ds_name()
member_1 = "MEM1"
- if ds_type == "PDS" or ds_type == "PDSE":
+ if ds_type == "pds" or ds_type == "pdse":
src_data_set = data_set_1 + "({0})".format(member_1)
dest_data_set = data_set_2 + "({0})".format(member_1)
else:
@@ -1975,7 +1981,7 @@ def test_copy_dest_lock(ansible_zos_module, ds_type):
hosts = ansible_zos_module
hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True)
hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True)
- if ds_type == "PDS" or ds_type == "PDSE":
+ if ds_type == "pds" or ds_type == "pdse":
hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True)
hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True)
# copy text_in source
@@ -2566,7 +2572,7 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src):
name=data_set,
type="pdse",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
replace=True
@@ -2612,7 +2618,7 @@ def test_copy_file_to_existing_member(ansible_zos_module, src):
name=data_set,
type="pdse",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
replace=True
@@ -2852,7 +2858,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type):
name=dest,
type=src_type,
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
)
@@ -2974,7 +2980,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr
state="present",
type="pds",
space_primary=2,
- record_format="FB",
+ record_format="fb",
record_length=80,
block_size=3120,
replace=True,
@@ -2984,11 +2990,11 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr
name=src_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3006,11 +3012,11 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr
name=dest_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
# pre-allocate dest loadlib to copy over with an alias.
@@ -3018,11 +3024,11 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr
name=dest_lib_aliases,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3111,11 +3117,11 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module):
name=src_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
hosts.all.zos_data_set(
@@ -3123,7 +3129,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module):
state="present",
type="pds",
space_primary=2,
- record_format="FB",
+ record_format="fb",
record_length=80,
block_size=3120,
replace=True,
@@ -3132,22 +3138,22 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module):
name=dest_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
hosts.all.zos_data_set(
name=dest_lib_aliases,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3262,7 +3268,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
state="present",
type="pds",
space_primary=2,
- record_format="FB",
+ record_format="fb",
record_length=80,
block_size=3120,
replace=True,
@@ -3272,11 +3278,11 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
name=src_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3300,11 +3306,11 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
name=dest_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
# allocate dest loadlib to copy over with an alias.
@@ -3312,11 +3318,11 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
name=dest_lib_aliases,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3330,12 +3336,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
executable=True,
aliases=False,
dest_data_set={
- 'type': "LIBRARY",
- 'record_format': "U",
+ 'type': "library",
+ 'record_format': "u",
'record_length': 0,
'block_size': 32760,
'space_primary': 2,
- 'space_type': "M",
+ 'space_type': "m",
}
)
# copy src loadlib to dest library pds w aliases
@@ -3346,12 +3352,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
executable=True,
aliases=True,
dest_data_set={
- 'type': "LIBRARY",
- 'record_format': "U",
+ 'type': "library",
+ 'record_format': "u",
'record_length': 0,
'block_size': 32760,
'space_primary': 2,
- 'space_type': "M",
+ 'space_type': "m",
}
)
@@ -3435,7 +3441,6 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
@pytest.mark.aliases
@pytest.mark.parametrize("is_created", [False, True])
def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
-
hosts = ansible_zos_module
mlq_s = 3
cobol_src_pds = get_tmp_ds_name(mlq_s)
@@ -3445,6 +3450,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
dest_lib = get_tmp_ds_name(mlq_s)
pgm_mem = "HELLO"
pgm2_mem = "HELLO2"
+ uss_location = "/tmp/loadlib"
try:
@@ -3454,7 +3460,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
state="present",
type="pds",
space_primary=2,
- record_format="FB",
+ record_format="fb",
record_length=80,
block_size=3120,
replace=True,
@@ -3464,11 +3470,11 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
name=src_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3488,11 +3494,32 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
validate_loadlib_pgm(hosts, steplib=src_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR)
# fetch loadlib into local
- tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch")
- # fetch loadlib to local
- fetch_result = hosts.all.zos_fetch(src=src_lib, dest=tmp_folder.name, is_binary=True)
- for res in fetch_result.contacted.values():
- source_path = res.get("dest")
+ # Copying the loadlib to USS.
+ hosts.all.file(name=uss_location, state='directory')
+ hosts.all.shell(
+ cmd=f"dcp -X -I \"{src_lib}\" {uss_location}",
+ executable=SHELL_EXECUTABLE
+ )
+
+ # Copying the remote loadlibs in USS to a local dir.
+ # This section ONLY handles ONE host, so if we ever use multiple hosts to
+ # test, we will need to update this code.
+ remote_user = hosts["options"]["user"]
+ # Removing a trailing comma because the framework saves the hosts list as a
+ # string instead of a list.
+ remote_host = hosts["options"]["inventory"].replace(",", "")
+
+ tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch")
+ cmd = [
+ "sftp",
+ "-r",
+ f"{remote_user}@{remote_host}:{uss_location}",
+ f"{tmp_folder.name}"
+ ]
+ with subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE) as sftp_proc:
+ result = sftp_proc.stdout.read()
+
+ source_path = os.path.join(tmp_folder.name, os.path.basename(uss_location))
if not is_created:
# ensure dest data sets absent for this variation of the test case.
@@ -3503,11 +3530,11 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
name=dest_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3520,12 +3547,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
executable=True,
aliases=False,
dest_data_set={
- 'type': "PDSE",
- 'record_format': "U",
+ 'type': "pdse",
+ 'record_format': "u",
'record_length': 0,
'block_size': 32760,
'space_primary': 2,
- 'space_type': "M",
+ 'space_type': "m",
}
)
else:
@@ -3563,6 +3590,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created):
hosts.all.zos_data_set(name=cobol_src_pds, state="absent")
hosts.all.zos_data_set(name=src_lib, state="absent")
hosts.all.zos_data_set(name=dest_lib, state="absent")
+ hosts.all.file(name=uss_location, state="absent")
@pytest.mark.pdse
@@ -3594,7 +3622,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module):
state="present",
type="pds",
space_primary=2,
- record_format="FB",
+ record_format="fb",
record_length=80,
block_size=3120,
replace=True,
@@ -3604,11 +3632,11 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module):
name=src_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3629,11 +3657,11 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module):
name=dest_lib,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
# allocate dest loadlib to copy over with an alias.
@@ -3641,11 +3669,11 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module):
name=dest_lib_aliases,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
@@ -3805,11 +3833,11 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created):
name=dest,
state="present",
type="pdse",
- record_format="U",
+ record_format="u",
record_length=0,
block_size=32760,
space_primary=2,
- space_type="M",
+ space_type="m",
replace=True
)
copy_uss_to_mvs_res = hosts.all.zos_copy(
@@ -4238,7 +4266,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type):
hosts.all.zos_data_set(
type=dest_type,
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=25,
)
@@ -4324,17 +4352,21 @@ def test_backup_pds(ansible_zos_module, args):
def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type):
hosts = ansible_zos_module
source = get_tmp_ds_name()
+ source_member = f"{source}(MEM)"
dest = get_tmp_ds_name()
volumes = Volume_Handler(volumes_on_systems)
volume_1 = volumes.get_available_vol()
+
if volume_1 == "SCR03":
volume = volumes.get_available_vol()
volumes.free_vol(volume_1)
volume_1 = volume
- source_member = source + "(MEM)"
+
try:
hosts.all.zos_data_set(name=source, type=src_type, state='present')
- hosts.all.zos_data_set(name=source_member, type="member", state='present')
+ if src_type != "seq":
+ hosts.all.zos_data_set(name=source_member, type="member", state='present')
+
copy_res = hosts.all.zos_copy(
src=source,
dest=dest,
@@ -4384,7 +4416,6 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module):
finally:
hosts.all.zos_data_set(name=dest_ds, state="absent")
-
@pytest.mark.vsam
@pytest.mark.parametrize("force", [False, True])
def test_copy_ksds_to_existing_ksds(ansible_zos_module, force):
@@ -4393,8 +4424,8 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force):
dest_ds = get_tmp_ds_name()
try:
- create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0)
- create_vsam_data_set(hosts, dest_ds, "KSDS", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, dest_ds, "ksds", add_data=True, key_length=12, key_offset=0)
copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, force=force)
verify_copy = get_listcat_information(hosts, dest_ds, "ksds")
@@ -4429,8 +4460,8 @@ def test_backup_ksds(ansible_zos_module, backup):
backup_name = None
try:
- create_vsam_data_set(hosts, src, "KSDS", add_data=True, key_length=12, key_offset=0)
- create_vsam_data_set(hosts, dest, "KSDS", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, src, "ksds", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, dest, "ksds", add_data=True, key_length=12, key_offset=0)
if backup:
copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup, remote_src=True, force=True)
@@ -4512,8 +4543,8 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems):
volume = volumes.get_available_vol()
space_primary = 3
space_secondary = 2
- space_type = "K"
- record_format = "VB"
+ space_type = "k"
+ record_format = "vb"
record_length = 100
block_size = 21000
@@ -4524,7 +4555,7 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems):
remote_src=True,
volume=volume,
dest_data_set=dict(
- type="SEQ",
+ type="seq",
space_primary=space_primary,
space_secondary=space_secondary,
space_type=space_type,
@@ -4555,7 +4586,7 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems):
assert len(output_lines) == 5
data_set_attributes = output_lines[2].strip().split()
assert len(data_set_attributes) == 4
- assert data_set_attributes[0] == record_format
+ assert data_set_attributes[0] == record_format.upper()
assert data_set_attributes[1] == str(record_length)
assert data_set_attributes[2] == str(block_size)
assert data_set_attributes[3] == "PS"
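One wrinkle from the lowercase migration shows up in the assertion just above: zos_copy now receives record_format="vb", but the dls -l listing still reports attributes in uppercase, so the comparison is normalized with .upper(). A trivial sketch of that normalization:

record_format = "vb"   # lowercase value passed in dest_data_set
listed_format = "VB"   # what the dls -l listing reports
assert listed_format == record_format.upper()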
diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py
index f5568f55e..7dc0ed7f3 100644
--- a/tests/functional/modules/test_zos_data_set_func.py
+++ b/tests/functional/modules/test_zos_data_set_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -27,6 +27,7 @@
# TODO: determine if data set names need to be more generic for testcases
# TODO: add additional tests to check additional data set creation parameter combinations
+
data_set_types = [
("pds"),
("seq"),
@@ -153,22 +154,24 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst
dataset = get_tmp_ds_name(2, 2)
try:
hosts.all.zos_data_set(
- name=dataset, state="cataloged", volumes=volume_1
+ name=dataset, state="cataloged", volumes=volume_1
)
hosts.all.zos_data_set(name=dataset, state="absent")
hosts.all.file(path=TEMP_PATH, state="directory")
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH))
results = hosts.all.zos_job_submit(
- src=TEMP_PATH + "/SAMPLE", location="USS", wait=True, wait_time_s=30
+ src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30
)
# verify data set creation was successful
+
for result in results.contacted.values():
if(result.get("jobs")[0].get("ret_code") is None):
submitted_job_id = result.get("jobs")[0].get("job_id")
assert submitted_job_id is not None
results = hosts.all.zos_job_output(job_id=submitted_job_id)
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
+
# verify first uncatalog was performed
results = hosts.all.zos_data_set(name=dataset, state="uncataloged")
for result in results.contacted.values():
@@ -181,6 +184,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst
results = hosts.all.zos_data_set(
name=dataset, state="cataloged", volumes=volume_1
)
+
for result in results.contacted.values():
assert result.get("changed") is True
# verify second catalog shows catalog already performed
@@ -215,7 +219,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s
hosts.all.file(path=TEMP_PATH, state="directory")
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH))
results = hosts.all.zos_job_submit(
- src=TEMP_PATH + "/SAMPLE", location="USS", wait=True
+ src=TEMP_PATH + "/SAMPLE", location="uss"
)
# verify data set creation was successful
for result in results.contacted.values():
@@ -234,6 +238,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s
results = hosts.all.zos_data_set(
name=dataset, state="present", volumes=volume_1
)
+
for result in results.contacted.values():
assert result.get("changed") is True
finally:
@@ -260,7 +265,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_
hosts.all.file(path=TEMP_PATH, state="directory")
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH))
results = hosts.all.zos_job_submit(
- src=TEMP_PATH + "/SAMPLE", location="USS", wait=True
+ src=TEMP_PATH + "/SAMPLE", location="uss"
)
# verify data set creation was successful
for result in results.contacted.values():
@@ -308,7 +313,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy
hosts.all.file(path=TEMP_PATH, state="directory")
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH))
results = hosts.all.zos_job_submit(
- src=TEMP_PATH + "/SAMPLE", location="USS", wait=True
+ src=TEMP_PATH + "/SAMPLE", location="uss"
)
# verify data set creation was successful
for result in results.contacted.values():
@@ -345,7 +350,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans
hosts.all.file(path=TEMP_PATH, state="directory")
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH))
- results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True)
+ results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss")
# verify data set creation was successful
for result in results.contacted.values():
@@ -360,7 +365,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans
hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent")
hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH))
- results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True)
+ results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss")
# verify data set creation was successful
for result in results.contacted.values():
@@ -368,11 +373,14 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans
hosts.all.file(path=TEMP_PATH, state="absent")
- # ensure data set absent
- results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1)
+ # ensure second data set absent
+ results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_2)
for result in results.contacted.values():
assert result.get("changed") is True
+ # ensure first data set absent
+ hosts.all.zos_data_set(name=dataset, state="cataloged")
+ results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1)
for result in results.contacted.values():
assert result.get("changed") is True
@@ -401,7 +409,7 @@ def test_data_set_creation_when_present_replace(ansible_zos_module, dstype):
try:
hosts = ansible_zos_module
dataset = get_tmp_ds_name(2, 2)
- hosts.all.zos_data_set(
+ results = hosts.all.zos_data_set(
name=dataset, state="present", type=dstype, replace=True
)
results = hosts.all.zos_data_set(
@@ -581,7 +589,7 @@ def test_data_member_force_delete(ansible_zos_module):
results = hosts.all.zos_data_set(
name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_2),
state="absent",
- type="MEMBER"
+ type="member"
)
for result in results.contacted.values():
assert result.get("failed") is True
@@ -589,7 +597,7 @@ def test_data_member_force_delete(ansible_zos_module):
# attempt to delete MEMBER_3 with force option.
results = hosts.all.zos_data_set(
- name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="MEMBER", force=True
+ name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="member", force=True
)
for result in results.contacted.values():
assert result.get("changed") is True
@@ -601,7 +609,7 @@ def test_data_member_force_delete(ansible_zos_module):
{
"name": "{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4),
"state": "absent",
- "type": "MEMBER",
+ "type": "member",
"force": True
}
]
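The batch form above mirrors the single-call deletes earlier in this test: with force=True, a member can be removed even while another task holds the containing PDSE. A minimal sketch of the equivalent single call, reusing the names defined in this test:

    results = hosts.all.zos_data_set(
        name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4),
        state="absent",
        type="member",
        force=True,  # permit the delete while the PDSE is in use elsewhere
    )
    for result in results.contacted.values():
        assert result.get("changed") is True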
@@ -638,9 +646,9 @@ def test_repeated_operations(ansible_zos_module):
DEFAULT_DATA_SET_NAME_WITH_MEMBER = DEFAULT_DATA_SET_NAME + "(MEM)"
results = hosts.all.zos_data_set(
name=DEFAULT_DATA_SET_NAME,
- type="PDS",
+ type="pds",
space_primary=5,
- space_type="CYL",
+ space_type="cyl",
record_length=15,
replace=True,
)
@@ -651,7 +659,7 @@ def test_repeated_operations(ansible_zos_module):
results = hosts.all.zos_data_set(
name=DEFAULT_DATA_SET_NAME,
- type="PDS",
+ type="pds",
replace=True,
)
@@ -660,7 +668,7 @@ def test_repeated_operations(ansible_zos_module):
assert result.get("module_stderr") is None
results = hosts.all.zos_data_set(
- name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", replace=True
+ name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", replace=True
)
for result in results.contacted.values():
@@ -668,7 +676,7 @@ def test_repeated_operations(ansible_zos_module):
assert result.get("module_stderr") is None
results = hosts.all.zos_data_set(
- name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER"
+ name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member"
)
for result in results.contacted.values():
@@ -676,7 +684,7 @@ def test_repeated_operations(ansible_zos_module):
assert result.get("module_stderr") is None
results = hosts.all.zos_data_set(
- name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", state="absent"
+ name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent"
)
for result in results.contacted.values():
@@ -684,7 +692,7 @@ def test_repeated_operations(ansible_zos_module):
assert result.get("module_stderr") is None
results = hosts.all.zos_data_set(
- name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", state="absent"
+ name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent"
)
for result in results.contacted.values():
@@ -704,9 +712,9 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module,
hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent")
results = hosts.all.zos_data_set(
name=DEFAULT_DATA_SET_NAME,
- type="SEQ",
+ type="seq",
space_primary=5,
- space_type="CYL",
+ space_type="cyl",
record_length=15,
volumes=[volume_1, volume_2],
)
@@ -741,11 +749,11 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo
hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent")
results = hosts.all.zos_data_set(
name=DEFAULT_DATA_SET_NAME,
- type="KSDS",
+ type="ksds",
key_length=5,
key_offset=0,
space_primary=5,
- space_type="CYL",
+ space_type="cyl",
volumes=[volume_1, volume_2],
)
for result in results.contacted.values():
@@ -769,27 +777,6 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo
hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent")
-def test_data_set_old_aliases(ansible_zos_module, volumes_on_systems):
- volumes = Volume_Handler(volumes_on_systems)
- volume_1 = volumes.get_available_vol()
- try:
- hosts = ansible_zos_module
- DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2)
- hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent")
- results = hosts.all.zos_data_set(
- name=DEFAULT_DATA_SET_NAME,
- state="present",
- format="fb",
- size="5m",
- volume=volume_1,
- )
- for result in results.contacted.values():
- assert result.get("changed") is True
- assert result.get("module_stderr") is None
- finally:
- hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent")
-
-
def test_data_set_temp_data_set_name(ansible_zos_module):
try:
hosts = ansible_zos_module
@@ -855,7 +842,7 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module):
@pytest.mark.parametrize(
"filesystem",
- ["HFS", "ZFS"],
+ ["hfs", "zfs"],
)
def test_filesystem_create_and_mount(ansible_zos_module, filesystem):
fulltest = True
@@ -864,7 +851,7 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem):
try:
hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent")
- if filesystem == "HFS":
+ if filesystem == "hfs":
result0 = hosts.all.shell(cmd="zinfo -t sys")
for result in result0.contacted.values():
sys_info = result.get("stdout_lines")
@@ -921,7 +908,7 @@ def test_data_set_creation_zero_values(ansible_zos_module):
results = hosts.all.zos_data_set(
name=DEFAULT_DATA_SET_NAME,
state="present",
- type="KSDS",
+ type="ksds",
replace=True,
space_primary=5,
space_secondary=0,
@@ -953,7 +940,7 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module):
@pytest.mark.parametrize(
"formats",
- ["F","FB", "VB", "FBA", "VBA", "U"],
+ ["f","fb", "vb", "fba", "vba", "u"],
)
def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems):
volumes = Volume_Handler(volumes_on_systems)
@@ -966,7 +953,8 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems):
name=DEFAULT_DATA_SET_NAME,
state="present",
format=formats,
- size="5m",
+ space_primary="5",
+ space_type="m",
volume=volume_1,
)
for result in results.contacted.values():
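The hunk above captures a rename that recurs through this patch: the removed size="5m" alias becomes the explicit pair space_primary/space_type, and choice values such as "SEQ" or "CYL" move to lowercase. A minimal sketch of the updated allocation call, assuming the suite's ansible_zos_module fixture:

    def allocate_small_ds(hosts, name):
        # Illustrative helper, not part of the patch: allocates a 5 MB
        # sequential data set using the new split space options.
        return hosts.all.zos_data_set(
            name=name,
            state="present",
            type="seq",        # lowercase replaces "SEQ"
            space_primary=5,   # replaces the removed size="5m" alias
            space_type="m",
        )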
diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py
index fd38e78e6..df01a6133 100644
--- a/tests/functional/modules/test_zos_encode_func.py
+++ b/tests/functional/modules/test_zos_encode_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2022
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -98,7 +98,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None,
type=ds_type,
state="present"
)
- if ds_type == "KSDS":
+ if ds_type == "ksds":
params["key_length"] = key_length
params["key_offset"] = key_offset
@@ -545,7 +545,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module):
cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH)
)
results = hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True
+ src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30
)
for result in results.contacted.values():
@@ -576,7 +576,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module):
hosts = ansible_zos_module
mlq_size = 3
MVS_VS = get_tmp_ds_name(mlq_size)
- create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0)
hosts.all.file(path=USS_DEST_FILE, state="touch")
results = hosts.all.zos_encode(
src=MVS_VS,
@@ -611,7 +611,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module):
hosts = ansible_zos_module
MVS_PS = get_tmp_ds_name()
MVS_VS = get_tmp_ds_name()
- create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0)
hosts.all.zos_data_set(name=MVS_PS, state="absent")
hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH)
results = hosts.all.zos_encode(
@@ -635,7 +635,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module):
hosts = ansible_zos_module
MVS_VS = get_tmp_ds_name()
MVS_PDS = get_tmp_ds_name()
- create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0)
MVS_PDS_MEMBER = MVS_PDS + '(MEM)'
hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH)
hosts.all.zos_data_set(
@@ -671,7 +671,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module):
cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH)
)
results = hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True
+ src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30
)
for result in results.contacted.values():
assert result.get("jobs") is not None
@@ -803,7 +803,7 @@ def test_vsam_backup(ansible_zos_module):
cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH)
)
hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True
+ src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30
)
hosts.all.file(path=TEMP_JCL_PATH, state="absent")
# submit JCL to populate KSDS
@@ -814,7 +814,7 @@ def test_vsam_backup(ansible_zos_module):
)
)
hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True
+ src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30
)
hosts.all.zos_encode(
@@ -972,8 +972,7 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module):
hosts.all.zos_data_set(name=MVS_PS, state="absent")
hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent")
hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq")
- hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH))
- hosts.all.zos_copy(src=TEMP_JCL_PATH, dest=MVS_PS, remote_src=True)
+ hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_FILE_TEXT, MVS_PS))
enc_ds = hosts.all.zos_encode(
src=MVS_PS,
encoding={
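The last hunk swaps an echo-to-temp-file plus zos_copy round trip for a single ZOAU decho call that writes the text straight into the sequential data set. A sketch of that setup step, assuming decho is on the managed node's PATH as these functional tests require:

    def seed_data_set(hosts, text, ds_name):
        # Hypothetical helper for illustration: writes `text` directly
        # into the MVS data set, with no intermediate USS file to clean up.
        return hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(text, ds_name))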
diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py
index 02142254b..7fd44651e 100644
--- a/tests/functional/modules/test_zos_fetch_func.py
+++ b/tests/functional/modules/test_zos_fetch_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020 - 2024
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -89,8 +89,8 @@ def extract_member_name(data_set):
def create_and_populate_test_ps_vb(ansible_zos_module, name):
params=dict(
name=name,
- type='SEQ',
- record_format='VB',
+ type='seq',
+ record_format='vb',
record_length='3180',
block_size='3190'
)
@@ -112,7 +112,7 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None)
Arguments:
hosts (object) -- Ansible instance(s) that can call modules.
name (str) -- Name of the VSAM data set.
- type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS)
+ ds_type (str) -- Type of the VSAM (ksds, esds, rrds, lds)
add_data (bool, optional) -- Whether to add records to the VSAM.
key_length (int, optional) -- Key length (only for KSDS data sets).
key_offset (int, optional) -- Key offset (only for KSDS data sets).
@@ -122,7 +122,7 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None)
type=ds_type,
state="present"
)
- if ds_type == "KSDS":
+ if ds_type == "ksds":
params["key_length"] = key_length
params["key_offset"] = key_offset
@@ -188,7 +188,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module):
def test_fetch_sequential_data_set_fixed_block(ansible_zos_module):
hosts = ansible_zos_module
TEST_PS = get_tmp_ds_name()
- hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m")
+ hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5)
hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS))
params = dict(src=TEST_PS, dest="/tmp/", flat=True)
dest_path = "/tmp/" + TEST_PS
@@ -229,7 +229,7 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module):
def test_fetch_partitioned_data_set(ansible_zos_module):
hosts = ansible_zos_module
TEST_PDS = get_tmp_ds_name()
- hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE")
+ hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse")
TEST_PDS_MEMBER = TEST_PDS + "(MEM)"
hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member")
hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER))
@@ -264,9 +264,9 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems):
cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path)
)
hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait=True
+ src="{0}/SAMPLE".format(temp_jcl_path), location="uss", wait_time_s=30
)
- hosts.all.zos_copy(content=TEST_DATA, dest=USS_FILE)
+ hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE))
hosts.all.zos_encode(
src=USS_FILE,
dest=test_vsam,
@@ -300,7 +300,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems):
def test_fetch_vsam_empty_data_set(ansible_zos_module):
hosts = ansible_zos_module
src_ds = "TEST.VSAM.DATA"
- create_vsam_data_set(hosts, src_ds, "KSDS", key_length=12, key_offset=0)
+ create_vsam_data_set(hosts, src_ds, "ksds", key_length=12, key_offset=0)
params = dict(src=src_ds, dest="/tmp/", flat=True)
dest_path = "/tmp/" + src_ds
try:
@@ -347,7 +347,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module):
def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module):
hosts = ansible_zos_module
TEST_PS = get_tmp_ds_name()
- hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m")
+ hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5)
hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS))
params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True)
dest_path = "/tmp/" + TEST_PS
@@ -368,7 +368,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module):
def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module):
hosts = ansible_zos_module
TEST_PDS = get_tmp_ds_name()
- hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE")
+ hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse")
TEST_PDS_MEMBER = TEST_PDS + "(MEM)"
hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member")
hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER))
@@ -417,7 +417,7 @@ def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module):
name=pds_name,
type="pds",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=25,
)
@@ -438,12 +438,12 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module):
name=pds_name,
type="pds",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=25,
)
hosts.all.zos_data_set(name=pds_name, type="pds")
- hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes")
+ hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes")
params = dict(src=pds_name + "(MYDATA)", dest="/tmp/", flat=True)
dest_path = "/tmp/MYDATA"
try:
@@ -535,16 +535,16 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module):
def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module):
hosts = ansible_zos_module
TEST_PS = get_tmp_ds_name()
- hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m")
+ hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5)
ds_name = TEST_PS
hosts.all.zos_data_set(name=TEST_PS, state="present")
hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS))
- dest_path = "/tmp/" + ds_name
+ dest_path = "/tmp/" + TEST_PS
with open(dest_path, "w") as infile:
infile.write(DUMMY_DATA)
local_checksum = checksum(dest_path, hash_func=sha256)
- params = dict(src=ds_name, dest="/tmp/", flat=True)
+ params = dict(src=TEST_PS, dest="/tmp/", flat=True)
try:
results = hosts.all.zos_fetch(**params)
for result in results.contacted.values():
@@ -562,15 +562,16 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module)
pds_name = get_tmp_ds_name()
dest_path = "/tmp/" + pds_name
full_path = dest_path + "/MYDATA"
+ pds_name_mem = pds_name + "(MYDATA)"
hosts.all.zos_data_set(
name=pds_name,
type="pds",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=25,
)
- hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes")
+ hosts.all.zos_data_set(name=pds_name_mem, type="member", replace="yes")
os.mkdir(dest_path)
with open(full_path, "w") as infile:
infile.write(DUMMY_DATA)
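create_vsam_data_set, shown earlier in this file, only attaches key geometry when the requested type is ksds; the other VSAM types omit those options. A condensed sketch of that branching, with illustrative values:

    def make_vsam(hosts, name, ds_type, key_length=None, key_offset=None):
        # Condensed from create_vsam_data_set above; key_length and
        # key_offset are only meaningful for type="ksds".
        params = dict(name=name, type=ds_type, state="present")
        if ds_type == "ksds":
            params["key_length"] = key_length
            params["key_offset"] = key_offset
        return hosts.all.zos_data_set(**params)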
diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py
index 50782be0b..067a2f192 100644
--- a/tests/functional/modules/test_zos_find_func.py
+++ b/tests/functional/modules/test_zos_find_func.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -15,6 +15,7 @@
__metaclass__ = type
from ibm_zos_core.tests.helpers.volumes import Volume_Handler
+import pytest
SEQ_NAMES = [
"TEST.FIND.SEQ.FUNCTEST.FIRST",
@@ -32,6 +33,8 @@
"TEST.FIND.VSAM.FUNCTEST.FIRST"
]
+DATASET_TYPES = ['seq', 'pds', 'pdse']
+
def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"):
hosts = ansible_zos_module
@@ -63,7 +66,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module):
batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES]
)
for ds in SEQ_NAMES:
- hosts.all.zos_lineinfile(src=ds, line=search_string)
+ hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ")
find_res = hosts.all.zos_find(
patterns=['TEST.FIND.SEQ.*.*'],
@@ -91,9 +94,9 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module):
batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES]
)
hosts.all.zos_data_set(name=new_ds, type='seq', state='present')
- hosts.all.zos_lineinfile(src=new_ds, line="incorrect string")
+ hosts.all.shell(cmd=f"decho 'incorrect string' \"{new_ds}\" ")
for ds in SEQ_NAMES:
- hosts.all.zos_lineinfile(src=ds, line=search_string)
+ hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ")
find_res = hosts.all.zos_find(
patterns=['TEST.FIND.SEQ.*.*', 'TEST.INVALID.*'],
@@ -118,20 +121,20 @@ def test_find_pds_members_containing_string(ansible_zos_module):
search_string = "hello"
try:
hosts.all.zos_data_set(
- batch=[dict(name=i, type='pds') for i in PDS_NAMES]
+ batch=[dict(name=i, type='pds', space_primary=1, space_type="m") for i in PDS_NAMES]
)
hosts.all.zos_data_set(
batch=[
dict(
name=i + "(MEMBER)",
- type="MEMBER",
+ type="member",
state='present',
replace='yes'
) for i in PDS_NAMES
]
)
for ds in PDS_NAMES:
- hosts.all.zos_lineinfile(src=ds + "(MEMBER)", line=search_string)
+ hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ")
find_res = hosts.all.zos_find(
pds_paths=['TEST.FIND.PDS.FUNCTEST.*'],
@@ -185,10 +188,10 @@ def test_exclude_members_from_matched_list(ansible_zos_module):
batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES]
)
hosts.all.zos_data_set(
- batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES]
+ batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES]
)
hosts.all.zos_data_set(
- batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES]
+ batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES]
)
find_res = hosts.all.zos_find(
pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*']
@@ -216,13 +219,14 @@ def test_find_data_sets_older_than_age(ansible_zos_module):
assert val.get('matched') == 2
-def test_find_data_sets_larger_than_size(ansible_zos_module):
+@pytest.mark.parametrize("ds_type", DATASET_TYPES)
+def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type):
hosts = ansible_zos_module
TEST_PS1 = 'TEST.PS.ONE'
TEST_PS2 = 'TEST.PS.TWO'
try:
- res = hosts.all.zos_data_set(name=TEST_PS1, state="present", size="5m")
- res = hosts.all.zos_data_set(name=TEST_PS2, state="present", size="5m")
+ res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_primary="1", space_type="m", type=ds_type)
+ res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_primary="1", space_type="m", type=ds_type)
find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k")
for val in find_res.contacted.values():
assert len(val.get('data_sets')) == 2
@@ -236,7 +240,7 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module):
hosts = ansible_zos_module
TEST_PS = 'USER.FIND.TEST'
try:
- hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k")
+ hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_primary="1", space_type="k")
find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m')
for val in find_res.contacted.values():
assert len(val.get('data_sets')) == 1
@@ -344,10 +348,10 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module):
batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES]
)
hosts.all.zos_data_set(
- batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES]
+ batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES]
)
hosts.all.zos_data_set(
- batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES]
+ batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES]
)
find_res = hosts.all.zos_find(
pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*']
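test_find_data_sets_larger_than_size above is now parametrized over DATASET_TYPES instead of hard-coding sequential data sets, so each type gets its own test run. The pattern, sketched with a hypothetical test name:

    import pytest

    DATASET_TYPES = ['seq', 'pds', 'pdse']

    @pytest.mark.parametrize("ds_type", DATASET_TYPES)
    def test_allocates_each_type(ansible_zos_module, ds_type):
        # pytest generates one test case per entry in DATASET_TYPES.
        hosts = ansible_zos_module
        hosts.all.zos_data_set(
            name="TEST.PS.ONE", state="present", type=ds_type,
            space_primary="1", space_type="m",
        )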
diff --git a/tests/functional/modules/test_zos_gather_facts_func.py b/tests/functional/modules/test_zos_gather_facts_func.py
index 1903f0cbd..0d28b8f25 100644
--- a/tests/functional/modules/test_zos_gather_facts_func.py
+++ b/tests/functional/modules/test_zos_gather_facts_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2022
+# Copyright (c) IBM Corporation 2022, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -120,7 +120,6 @@ def test_with_gather_subset_bad(ansible_zos_module, gather_subset):
for result in results.contacted.values():
assert result is not None
- assert re.match(r'^BGYSC5203E', result.get('zinfo_err_msg'))
assert re.match(r'^An invalid subset', result.get('msg'))
diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py
index 830828769..96bc0b2bc 100644
--- a/tests/functional/modules/test_zos_job_output_func.py
+++ b/tests/functional/modules/test_zos_job_output_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -31,8 +31,6 @@
"""
TEMP_PATH = "/tmp/jcl"
-JOB_NOT_FOUND_MSG_TXT="The job with the name * could not be found."
-JOB_NOT_FOUND_MSG_TXT_ID="The job with the job_id INVALID could not be found."
def test_zos_job_output_no_job_id(ansible_zos_module):
hosts = ansible_zos_module
@@ -47,7 +45,8 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module):
results = hosts.all.zos_job_output(job_id="INVALID")
for result in results.contacted.values():
assert result.get("changed") is False
- assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT_ID
+ assert result.get("stderr") is not None
+ assert result.get("failed") is True
def test_zos_job_output_no_job_name(ansible_zos_module):
@@ -63,7 +62,7 @@ def test_zos_job_output_invalid_job_name(ansible_zos_module):
results = hosts.all.zos_job_output(job_name="INVALID")
for result in results.contacted.values():
assert result.get("changed") is False
- assert result.get("jobs")[0].get('job_name') == "INVALID"
+ assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None
def test_zos_job_output_no_owner(ansible_zos_module):
@@ -71,7 +70,7 @@ def test_zos_job_output_no_owner(ansible_zos_module):
results = hosts.all.zos_job_output(owner="")
for result in results.contacted.values():
assert result.get("changed") is False
- assert result.get("jobs") is None
+ assert result.get("msg") is not None
def test_zos_job_output_invalid_owner(ansible_zos_module):
@@ -79,7 +78,7 @@ def test_zos_job_output_invalid_owner(ansible_zos_module):
results = hosts.all.zos_job_output(owner="INVALID")
for result in results.contacted.values():
assert result.get("changed") is False
- assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT
+ assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None
def test_zos_job_output_reject(ansible_zos_module):
@@ -100,10 +99,10 @@ def test_zos_job_output_job_exists(ansible_zos_module):
)
jobs = hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None
+ src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None
)
-
for job in jobs.contacted.values():
+ print(job)
assert job.get("jobs") is not None
for job in jobs.contacted.values():
@@ -127,8 +126,8 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module):
hosts.all.shell(
cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH)
)
- hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None
+ result = hosts.all.zos_job_submit(
+ src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None
)
hosts.all.file(path=TEMP_PATH, state="absent")
dd_name = "JESMSGLG"
@@ -147,4 +146,4 @@ def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module):
hosts = ansible_zos_module
results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER")
for result in results.contacted.values():
- assert result.get("jobs") is not None
+ assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None
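The assertion changes in this file share one idea: rather than pin the exact not-found message text (the removed JOB_NOT_FOUND_MSG_TXT constants), the tests only require that a message is present, which survives wording changes across ZOAU releases. The resulting pattern, sketched:

    results = hosts.all.zos_job_output(job_name="INVALID")
    for result in results.contacted.values():
        assert result.get("changed") is False
        # Presence check instead of an exact-string comparison.
        assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None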
diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py
index fbb456c9f..11680ab57 100644
--- a/tests/functional/modules/test_zos_job_query_func.py
+++ b/tests/functional/modules/test_zos_job_query_func.py
@@ -63,7 +63,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module):
cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME)
)
results = hosts.all.zos_job_submit(
- src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait=True
+ src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="data_set", wait_time_s=10
)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
@@ -96,7 +96,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module):
cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME)
)
results = hosts.all.zos_job_submit(
- src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait=True
+ src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="data_set", wait_time_s=10
)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py
index f7ab3eb12..53f096c2b 100644
--- a/tests/functional/modules/test_zos_job_submit_func.py
+++ b/tests/functional/modules/test_zos_job_submit_func.py
@@ -163,7 +163,7 @@
//******************************************************************************
//* Job containing a non-existent DSN that will force an error.
//* Returns:
-//* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?)
+//* ret_code->(code=null, msg=JCLERR, msg_txt=JCLERR, msg_code=None)
//* msg --> The JCL submitted with job id JOB00532 but there was an error,
//* please review the error for further details: The job completion
//* code (CC) was not in the job log. Please review the error
@@ -198,7 +198,7 @@
//* Another job containing no job card resulting in a JCLERROR with a value. It
//* won't always be 952, it will increment.
//* Returns:
-//* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null)
+//* ret_code->(code=null, msg=JCLERR, msg_txt=JCLERR, msg_code=null)
//* msg --> The JCL submitted with job id JOB00728 but there was an error,
//* please review the error for further details: The job completion
//* code (CC) was not in the job log. Please review the error
@@ -238,7 +238,7 @@
//* not actually run the JCL. The job will be put on the H output queue, DDs
//* JESJCL and JESMSGLG are available. Ansible considers this a passing job.
//* Returns:
-//* ret_code->(code=null, msg=TYPRUN=SCAN, msg_text=, msg_code=null)
+//* ret_code->(code=null, msg=TYPRUN=SCAN, msg_txt=, msg_code=null)
//* msg --> The job JOB00551 was run with special job processing TYPRUN=SCAN.
//* This will result in no completion, return code or job steps and
//* changed will be false."
@@ -264,7 +264,7 @@
//* JESMSGLG and JESJCLIN are available. Ansible considers this a failing job
//* because the job's status currently cannot be determined, so it times out.
//* Returns:
-//* ret_code->(code=null, msg=?, msg_text=, msg_code=?)
+//* ret_code->(code=None, msg=None, msg_txt=, msg_code=None)
//* msg --> The JCL submitted with job id JOB00555 but appears to be a long
//* running job that exceeded its maximum wait time of 10 second(s).
//* Consider using module zos_job_query to poll for a long running
@@ -286,11 +286,11 @@
//******************************************************************************
//* Job containing a TYPRUN=HOLD will cause JES to hold this JCL without
//* executing it until a special event occurs at which time, the operator will
-//* release the job from HOLD and allow the job to to continue processing.
+//* release the job from HOLD and allow the job to continue processing.
//* Ansible considers this a failing job
//* because the job's status currently cannot be determined, so it times out.
//* Returns:
-//* ret_code->(code=null, msg=AC, msg_text=, msg_code=?)
+//* ret_code->(code=None, msg=None, msg_txt=, msg_code=None)
//* msg --> The JCL submitted with job id JOB00555 but appears to be a long
//* running job that exceeded its maximum wait time of 10 second(s).
//* Consider using module zos_job_query to poll for a long running
@@ -317,7 +317,7 @@
//* Ansible considers this a failing job
//* because the job's status currently cannot be determined, so it times out.
//* Returns:
-//* ret_code->(code=null, msg=AC, msg_text=, msg_code=?)
+//* ret_code->(code=None, msg=None, msg_txt=, msg_code=None)
//* msg --> The JCL submitted with job id JOB00555 but appears to be a long
//* running job that exceeded its maximum wait time of 10 second(s).
//* Consider using module zos_job_query to poll for a long running
@@ -342,15 +342,34 @@
C_SRC_INVALID_UTF8 = """#include <stdio.h>
int main()
{
- /* Generate and print all EBCDIC characters to stdout to
- * ensure non-printable chars can be handled by Python.
- * This will included the non-printable hex from DBB docs:
- * nl=0x15, cr=0x0D, lf=0x25, shiftOut=0x0E, shiftIn=0x0F
- */
-
- for (int i = 0; i <= 255; i++) {
- printf("Hex 0x%X is character: (%c)\\\\n",i,(char)(i));
- }
+ unsigned char a=0x64;
+ unsigned char b=0x2A;
+ unsigned char c=0xB8;
+ unsigned char d=0xFF;
+ unsigned char e=0x81;
+ unsigned char f=0x82;
+ unsigned char g=0x83;
+ unsigned char h=0x00;
+ /* The following are non-printables from DBB. */
+ unsigned char nl=0x15;
+ unsigned char cr=0x0D;
+ unsigned char lf=0x25;
+ unsigned char shiftOut=0x0E;
+ unsigned char shiftIn=0x0F;
+
+ printf("Value of a: Hex: %X, character: %c",a,a);
+ printf("Value of b: Hex: %X, character: %c",b,b);
+ printf("Value of c: Hex: %X, character: %c",c,c);
+ printf("Value of d: Hex: %X, character: %c",d,d);
+ printf("Value of e: Hex: %X, character: %c",e,e);
+ printf("Value of f: Hex: %X, character: %c",f,f);
+ printf("Value of g: Hex: %X, character: %c",g,g);
+ printf("Value of h: Hex: %X, character: %c",h,h);
+ printf("Value of NL: Hex: %X, character: %c",nl,nl);
+ printf("Value of CR: Hex: %X, character: %c",cr,cr);
+ printf("Value of LF: Hex: %X, character: %c",lf,lf);
+ printf("Value of Shift-Out: Hex: %X, character: %c",shiftOut,shiftOut);
+ printf("Value of Shift-In: Hex: %X, character: %c",shiftIn,shiftIn);
return 0;
}
@@ -359,7 +378,7 @@
JCL_INVALID_UTF8_CHARS_EXC = """//*
//******************************************************************************
//* Job that runs a C program that returns characters outside of the UTF-8 range
-//* expected by Python. This job tests a bugfix present in ZOAU v1.2.5.6 and
+//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 and
//* later that deals properly with these chars.
//* The JCL needs to be formatted to give it the directory where the C program
//* is located.
@@ -392,8 +411,8 @@ def test_job_submit_PDS(ansible_zos_module, location):
"""
Test zos_job_submit with a PDS(MEMBER), also test the default
value for 'location', ensure it works with and without the
- value "DATA_SET". If default_location is True, then don't
- pass a 'location:DATA_SET' allow its default to come through.
+ value "data_set". If default_location is True, then don't
+ pass 'location:data_set'; allow its default to come through.
"""
try:
results = None
@@ -403,19 +422,21 @@ def test_job_submit_PDS(ansible_zos_module, location):
hosts.all.shell(
cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH)
)
+
hosts.all.zos_data_set(
name=data_set_name, state="present", type="pds", replace=True
)
+
hosts.all.shell(
cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name)
)
if bool(location.get("default_location")):
results = hosts.all.zos_job_submit(
- src="{0}(SAMPLE)".format(data_set_name), wait=True
+ src="{0}(SAMPLE)".format(data_set_name), wait_time_s=30
)
else:
results = hosts.all.zos_job_submit(
- src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait=True
+ src="{0}(SAMPLE)".format(data_set_name), location="data_set", wait_time_s=30
)
for result in results.contacted.values():
@@ -423,8 +444,8 @@ def test_job_submit_PDS(ansible_zos_module, location):
assert result.get("jobs")[0].get("ret_code").get("code") == 0
assert result.get("changed") is True
finally:
- hosts.all.file(path=TEMP_PATH, state="absent")
- hosts.all.zos_data_set(name=data_set_name, state="absent")
+ hosts.all.file(path=TEMP_PATH, state="absent")
+ hosts.all.zos_data_set(name=data_set_name, state="absent")
def test_job_submit_PDS_special_characters(ansible_zos_module):
@@ -444,8 +465,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module):
)
results = hosts.all.zos_job_submit(
src="{0}(SAMPLE)".format(DATA_SET_NAME_SPECIAL_CHARS),
- location="DATA_SET",
- wait=True,
+ location="data_set",
)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
@@ -464,7 +484,7 @@ def test_job_submit_USS(ansible_zos_module):
cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH)
)
results = hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None
+ src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None
)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
@@ -479,9 +499,10 @@ def test_job_submit_LOCAL(ansible_zos_module):
with open(tmp_file.name, "w") as f:
f.write(JCL_FILE_CONTENTS)
hosts = ansible_zos_module
- results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True)
+ results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10)
for result in results.contacted.values():
+ print(result)
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
assert result.get("jobs")[0].get("ret_code").get("code") == 0
assert result.get("changed") is True
@@ -492,7 +513,7 @@ def test_job_submit_LOCAL_extraR(ansible_zos_module):
with open(tmp_file.name, "w") as f:
f.write(JCL_FILE_CONTENTS_BACKSLASH_R)
hosts = ansible_zos_module
- results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True)
+ results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
@@ -505,7 +526,7 @@ def test_job_submit_LOCAL_BADJCL(ansible_zos_module):
with open(tmp_file.name, "w") as f:
f.write(JCL_FILE_CONTENTS_BAD)
hosts = ansible_zos_module
- results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True)
+ results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10)
for result in results.contacted.values():
# Expecting: The job completion code (CC) was not in the job log....."
@@ -537,7 +558,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems):
name=data_set_name, state="uncataloged", type="pds"
)
- results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1)
+ results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="data_set", volume=volume_1)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
assert result.get("jobs")[0].get("ret_code").get("code") == 0
@@ -568,7 +589,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module):
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)",
- location="DATA_SET", wait_time_s=wait_time_s)
+ location="data_set", wait_time_s=wait_time_s)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
@@ -601,7 +622,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module):
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)",
- location="DATA_SET", wait_time_s=wait_time_s)
+ location="data_set", wait_time_s=wait_time_s)
for result in results.contacted.values():
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
@@ -634,7 +655,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module):
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)",
- location="DATA_SET", wait_time_s=wait_time_s)
+ location="data_set", wait_time_s=wait_time_s)
for result in results.contacted.values():
assert result.get("msg") is not None
@@ -661,7 +682,7 @@ def test_job_submit_max_rc(ansible_zos_module, args):
f.write(JCL_FILE_CONTENTS_RC_8)
results = hosts.all.zos_job_submit(
- src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"]
+ src=tmp_file.name, location="local", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"]
)
for result in results.contacted.values():
@@ -674,10 +695,7 @@ def test_job_submit_max_rc(ansible_zos_module, args):
#Expecting: - "The job return code 8 was non-zero in the job output, this job has failed"
# - Consider using module zos_job_query to poll for a long running job or
# increase option \\'wait_times_s` to a value greater than 10.",
- if result.get('duration'):
- duration = result.get('duration')
- else:
- duration = 0
+ duration = result.get('duration')
if duration >= args["wait_time_s"]:
re.search(r'long running job', repr(result.get("msg")))
@@ -694,11 +712,11 @@ def test_job_submit_max_rc(ansible_zos_module, args):
assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg")))
elif args["max_rc"] == 12:
- # Will not fail but changed will be false for the non-zero RC, there
- # are other possibilities like an ABEND or JCL ERROR will fail this even
+ # Will not fail; since max_rc is set to 12 and the job RC is 8, changed is True.
+ # There are other possibilities like an ABEND or JCL ERROR that will fail this even
# with a MAX RC
assert result.get("msg") is None
- assert result.get('changed') is False
+ assert result.get('changed') is True
assert result.get("jobs")[0].get("ret_code").get("code") < 12
finally:
hosts.all.file(path=tmp_file.name, state="absent")
@@ -752,7 +770,7 @@ def test_job_submit_jinja_template(ansible_zos_module, args):
results = hosts.all.zos_job_submit(
src=tmp_file.name,
- location="LOCAL",
+ location="local",
use_template=True,
template_parameters=args["options"]
)
@@ -774,7 +792,12 @@ def test_job_submit_full_input(ansible_zos_module):
cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FULL_INPUT), TEMP_PATH)
)
results = hosts.all.zos_job_submit(
- src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait_time_s=20, volume=None
+ src="{0}/SAMPLE".format(TEMP_PATH),
+ location="uss",
+ volume=None,
+ # This job used to set wait=True, but since it has been deprecated
+ # and removed, it now waits up to 30 seconds.
+ wait_time_s=30
)
for result in results.contacted.values():
print(result)
@@ -789,7 +812,7 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module):
with open(tmp_file.name, "w") as f:
f.write(JCL_FILE_CONTENTS_NO_DSN)
hosts = ansible_zos_module
- results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="LOCAL")
+ results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="local")
import pprint
for result in results.contacted.values():
assert result.get("changed") is False
@@ -797,21 +820,19 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module):
assert result.get("jobs")[0].get("job_id") is not None
-# Should have a JCL ERROR
def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module):
tmp_file = tempfile.NamedTemporaryFile(delete=True)
with open(tmp_file.name, "w") as f:
f.write(JCL_FILE_CONTENTS_INVALID_USER)
hosts = ansible_zos_module
- results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL")
+ results = hosts.all.zos_job_submit(src=tmp_file.name, location="local")
for result in results.contacted.values():
assert result.get("changed") is False
- assert re.search(r'return code was not available', repr(result.get("msg")))
- assert re.search(r'status SEC', repr(result.get("msg")))
+ assert re.search(r'please review the error for further details', repr(result.get("msg")))
+ assert re.search(r'please review the job log for status SEC', repr(result.get("msg")))
assert result.get("jobs")[0].get("job_id") is not None
- assert re.search(r'please review the job log', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")))
- assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg")))
+ assert re.search(r'please review the job log for status SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")))
def test_job_submit_local_jcl_typrun_scan(ansible_zos_module):
@@ -820,7 +841,7 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module):
f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN)
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=tmp_file.name,
- location="LOCAL",
+ location="local",
wait_time_s=20,
encoding={
"from": "UTF-8",
@@ -841,7 +862,7 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module):
f.write(JCL_FILE_CONTENTS_TYPRUN_COPY)
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=tmp_file.name,
- location="LOCAL",
+ location="local",
wait_time_s=20,
encoding={
"from": "UTF-8",
@@ -854,8 +875,8 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module):
assert result.get("jobs")[0].get("job_id") is not None
assert re.search(r'please review the job log', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")))
assert result.get("jobs")[0].get("ret_code").get("code") is None
- assert result.get("jobs")[0].get("ret_code").get("msg") == "?"
- assert result.get("jobs")[0].get("ret_code").get("msg_code") == "?"
+ assert result.get("jobs")[0].get("ret_code").get("msg") is None
+ assert result.get("jobs")[0].get("ret_code").get("msg_code") is None
def test_job_submit_local_jcl_typrun_hold(ansible_zos_module):
@@ -864,7 +885,7 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module):
f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD)
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=tmp_file.name,
- location="LOCAL",
+ location="local",
wait_time_s=20,
encoding={
"from": "UTF-8",
@@ -876,7 +897,7 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module):
assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")))
assert result.get("jobs")[0].get("ret_code").get("code") is None
assert result.get("jobs")[0].get("ret_code").get("msg") == "AC"
- assert result.get("jobs")[0].get("ret_code").get("msg_code") == "?"
+ assert result.get("jobs")[0].get("ret_code").get("msg_code") is None
def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module):
@@ -885,7 +906,7 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module):
f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD)
hosts = ansible_zos_module
results = hosts.all.zos_job_submit(src=tmp_file.name,
- location="LOCAL",
+ location="local",
wait_time_s=20,
encoding={
"from": "UTF-8",
@@ -897,7 +918,8 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module):
assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")))
assert result.get("jobs")[0].get("ret_code").get("code") is None
assert result.get("jobs")[0].get("ret_code").get("msg") == "AC"
- assert result.get("jobs")[0].get("ret_code").get("msg_code") == "?"
+ assert result.get("jobs")[0].get("ret_code").get("msg_code") is None
+
# This test case is related to the following GitHub issues:
# - https://github.com/ansible-collections/ibm_zos_core/issues/677
@@ -920,12 +942,11 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module):
results = hosts.all.zos_job_submit(
src=tmp_file.name,
- location="LOCAL",
+ location="local",
wait_time_s=15
)
for result in results.contacted.values():
- print(result)
# We shouldn't get an error now that ZOAU handles invalid/unprintable
# UTF-8 chars correctly.
assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py
index a15201b1c..a9a29227d 100644
--- a/tests/functional/modules/test_zos_lineinfile_func.py
+++ b/tests/functional/modules/test_zos_lineinfile_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2022, 2023
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -30,14 +30,13 @@
int main(int argc, char** argv)
{
char dsname[ strlen(argv[1]) + 4];
- sprintf(dsname, "//'%s'", argv[1]);
+ sprintf(dsname, \\\"//'%s'\\\", argv[1]);
FILE* member;
- member = fopen(dsname, "rb,type=record");
+ member = fopen(dsname, \\\"rb,type=record\\\");
sleep(300);
fclose(member);
return 0;
-}
-"""
+}"""
call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M
//LOCKMEM EXEC PGM=BPXBATCH
@@ -225,7 +224,7 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT)
hosts = ansible_zos_module
hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE))
hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE)
- if DS_TYPE in ["PDS", "PDSE"]:
+ if DS_TYPE in ["pds", "pdse"]:
DS_FULL_NAME = DS_NAME + "(MEM)"
hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member")
cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME)
@@ -239,12 +238,13 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT)
def remove_ds_environment(ansible_zos_module, DS_NAME):
hosts = ansible_zos_module
hosts.all.zos_data_set(name=DS_NAME, state="absent")
+
# supported data set types
-DS_TYPE = ['SEQ', 'PDS', 'PDSE']
+DS_TYPE = ['seq', 'pds', 'pdse']
# not supported data set types
-NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS']
+NS_DS_TYPE = ['esds', 'rrds', 'lds']
# The encoding will only be used on a few tests
-ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8']
+ENCODING = ['ISO8859-1', 'UTF-8']
#########################
# USS test cases
@@ -675,93 +675,97 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype):
finally:
remove_ds_environment(ansible_zos_module, ds_name)
-
-@pytest.mark.ds
-@pytest.mark.parametrize("dstype", DS_TYPE)
-def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype):
- hosts = ansible_zos_module
- ds_type = dstype
- params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present")
- ds_name = get_tmp_ds_name()
- temp_file = "/tmp/" + ds_name
- content = TEST_CONTENT
- try:
- ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
- params["path"] = ds_full_name
- results = hosts.all.zos_lineinfile(**params)
- for result in results.contacted.values():
- assert result.get("changed") == 1
- results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
- for result in results.contacted.values():
- assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER
- finally:
- remove_ds_environment(ansible_zos_module, ds_name)
-
-
-@pytest.mark.ds
-@pytest.mark.parametrize("dstype", DS_TYPE)
-def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype):
- hosts = ansible_zos_module
- ds_type = dstype
- params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present")
- ds_name = get_tmp_ds_name()
- temp_file = "/tmp/" + ds_name
- content = TEST_CONTENT
- try:
- ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
- params["path"] = ds_full_name
- results = hosts.all.zos_lineinfile(**params)
- for result in results.contacted.values():
- assert result.get("changed") == 1
- results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
- for result in results.contacted.values():
- assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE
- finally:
- remove_ds_environment(ansible_zos_module, ds_name)
-
-
-@pytest.mark.ds
-@pytest.mark.parametrize("dstype", DS_TYPE)
-def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype):
- hosts = ansible_zos_module
- ds_type = dstype
- params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present")
- ds_name = get_tmp_ds_name()
- temp_file = "/tmp/" + ds_name
- content = TEST_CONTENT
- try:
- ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
- params["path"] = ds_full_name
- results = hosts.all.zos_lineinfile(**params)
- for result in results.contacted.values():
- assert result.get("changed") == 1
- results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
- for result in results.contacted.values():
- assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH
- finally:
- remove_ds_environment(ansible_zos_module, ds_name)
-
-
-@pytest.mark.ds
-@pytest.mark.parametrize("dstype", DS_TYPE)
-def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype):
- hosts = ansible_zos_module
- ds_type = dstype
- params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present")
- ds_name = get_tmp_ds_name()
- temp_file = "/tmp/" + ds_name
- content = TEST_CONTENT
- try:
- ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
- params["path"] = ds_full_name
- results = hosts.all.zos_lineinfile(**params)
- for result in results.contacted.values():
- assert result.get("changed") == 1
- results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
- for result in results.contacted.values():
- assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH
- finally:
- remove_ds_environment(ansible_zos_module, ds_name)
+#GH Issue #1244 / JIRA NAZARE-10439
+#@pytest.mark.ds
+#@pytest.mark.parametrize("dstype", DS_TYPE)
+#def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype):
+# hosts = ansible_zos_module
+# ds_type = dstype
+# params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present")
+# ds_name = get_tmp_ds_name()
+# temp_file = "/tmp/" + ds_name
+# content = TEST_CONTENT
+# try:
+# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
+# params["path"] = ds_full_name
+# results = hosts.all.zos_lineinfile(**params)
+# for result in results.contacted.values():
+# print(result)
+# assert result.get("changed") == 1
+# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
+# for result in results.contacted.values():
+# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER
+# finally:
+# remove_ds_environment(ansible_zos_module, ds_name)
+
+#GH Issue #1244 / JIRA NAZARE-10439
+#@pytest.mark.ds
+#@pytest.mark.parametrize("dstype", DS_TYPE)
+#def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype):
+# hosts = ansible_zos_module
+# ds_type = dstype
+# params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present")
+# ds_name = get_tmp_ds_name()
+# temp_file = "/tmp/" + ds_name
+# content = TEST_CONTENT
+# try:
+# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
+# params["path"] = ds_full_name
+# results = hosts.all.zos_lineinfile(**params)
+# for result in results.contacted.values():
+# print(result)
+# assert result.get("changed") == 1
+# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
+# for result in results.contacted.values():
+# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE
+# finally:
+# remove_ds_environment(ansible_zos_module, ds_name)
+
+# GH Issue #1244 / JIRA NAZARE-10439
+#@pytest.mark.ds
+#@pytest.mark.parametrize("dstype", DS_TYPE)
+#def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype):
+# hosts = ansible_zos_module
+# ds_type = dstype
+# params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present")
+# ds_name = get_tmp_ds_name()
+# temp_file = "/tmp/" + ds_name
+# content = TEST_CONTENT
+# try:
+# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
+# params["path"] = ds_full_name
+# results = hosts.all.zos_lineinfile(**params)
+# for result in results.contacted.values():
+# print(result)
+# assert result.get("changed") == 1
+# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
+# for result in results.contacted.values():
+# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH
+# finally:
+# remove_ds_environment(ansible_zos_module, ds_name)
+
+# GH Issue #1244 / JIRA NAZARE-10439
+#@pytest.mark.ds
+#@pytest.mark.parametrize("dstype", DS_TYPE)
+#def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype):
+# hosts = ansible_zos_module
+# ds_type = dstype
+# params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present")
+# ds_name = get_tmp_ds_name()
+# temp_file = "/tmp/" + ds_name
+# content = TEST_CONTENT
+# try:
+# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content)
+# params["path"] = ds_full_name
+# results = hosts.all.zos_lineinfile(**params)
+# for result in results.contacted.values():
+# print(result)
+# assert result.get("changed") == 1
+# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
+# for result in results.contacted.values():
+# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH
+# finally:
+# remove_ds_environment(ansible_zos_module, ds_name)
@pytest.mark.ds
@@ -790,7 +794,7 @@ def test_ds_line_absent(ansible_zos_module, dstype):
def test_ds_tmp_hlq_option(ansible_zos_module):
# This TMPHLQ only works with sequential datasets
hosts = ansible_zos_module
- ds_type = "SEQ"
+ ds_type = "seq"
kwargs = dict(backup_name=r"TMPHLQ\..")
params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ")
content = TEST_CONTENT
@@ -845,7 +849,7 @@ def test_ds_line_force(ansible_zos_module, dstype):
MEMBER_1, MEMBER_2 = "MEM1", "MEM2"
TEMP_FILE = "/tmp/{0}".format(MEMBER_2)
content = TEST_CONTENT
- if ds_type == "SEQ":
+ if ds_type == "seq":
params["path"] = default_data_set_name+".{0}".format(MEMBER_2)
else:
params["path"] = default_data_set_name+"({0})".format(MEMBER_2)
@@ -862,7 +866,7 @@ def test_ds_line_force(ansible_zos_module, dstype):
]
)
# write member to verify cases
- if ds_type in ["PDS", "PDSE"]:
+ if ds_type in ["pds", "pdse"]:
cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"])
else:
cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"])
@@ -871,12 +875,12 @@ def test_ds_line_force(ansible_zos_module, dstype):
for result in results.contacted.values():
assert int(result.get("stdout")) != 0
# copy/compile c program and copy jcl to hold data set lock for n seconds in background(&)
- hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True)
- hosts.all.zos_copy(
- content=call_c_jcl.format(default_data_set_name, MEMBER_1),
- dest='/tmp/disp_shr/call_c_pgm.jcl',
- force=True
- )
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c'))
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(
+ call_c_jcl.format(
+ default_data_set_name,
+ MEMBER_1),
+ '/tmp/disp_shr/call_c_pgm.jcl'))
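+ # Note: the echo redirections above assume c_pgm and call_c_jcl contain no
+ # unescaped double quotes; otherwise printf-style quoting would be needed.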
hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/")
hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/")
time.sleep(5)
@@ -897,7 +901,7 @@ def test_ds_line_force(ansible_zos_module, dstype):
@pytest.mark.ds
-@pytest.mark.parametrize("dstype", ["PDS","PDSE"])
+@pytest.mark.parametrize("dstype", ["pds","pdse"])
def test_ds_line_force_fail(ansible_zos_module, dstype):
hosts = ansible_zos_module
ds_type = dstype
@@ -925,12 +929,13 @@ def test_ds_line_force_fail(ansible_zos_module, dstype):
for result in results.contacted.values():
assert int(result.get("stdout")) != 0
# copy/compile c program and copy jcl to hold data set lock for n seconds in background(&)
- hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True)
- hosts.all.zos_copy(
- content=call_c_jcl.format(default_data_set_name, MEMBER_1),
- dest='/tmp/disp_shr/call_c_pgm.jcl',
- force=True
- )
+ hosts.all.file(path="/tmp/disp_shr", state='directory')
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c'))
+ hosts.all.shell(cmd="echo \"{0}\" > {1}".format(
+ call_c_jcl.format(
+ default_data_set_name,
+ MEMBER_1),
+ '/tmp/disp_shr/call_c_pgm.jcl'))
hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/")
hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/")
time.sleep(5)
@@ -983,7 +988,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype):
def test_uss_encoding(ansible_zos_module, encoding):
hosts = ansible_zos_module
insert_data = "Insert this string"
- params = dict(insertafter="SIMPLE", line=insert_data, state="present")
- params["encoding"] = encoding
+ params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding})
full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3]
content = "SIMPLE LINE TO VERIFY"
@@ -991,12 +996,11 @@ def test_uss_encoding(ansible_zos_module, encoding):
hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE))
hosts.all.file(path=full_path, state="touch")
hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path))
- hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"])
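+ # The file is written in the shell's default EBCDIC codepage (IBM-1047);
+ # the new encoding dict asks zos_lineinfile itself to handle conversion.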
params["path"] = full_path
results = hosts.all.zos_lineinfile(**params)
for result in results.contacted.values():
assert result.get("changed") == 1
- results = hosts.all.shell(cmd="cat {0}".format(params["path"]))
+ results = hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {encoding} {full_path}")
for result in results.contacted.values():
assert result.get("stdout") == EXPECTED_ENCODING
finally:
@@ -1010,16 +1014,16 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype):
hosts = ansible_zos_module
ds_type = dstype
insert_data = "Insert this string"
- params = dict(insertafter="SIMPLE", line=insert_data, state="present")
- params["encoding"] = encoding
+ params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding})
ds_name = get_tmp_ds_name()
temp_file = "/tmp/" + ds_name
content = "SIMPLE LINE TO VERIFY"
try:
hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file))
- hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"])
+ hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ")
hosts.all.zos_data_set(name=ds_name, type=ds_type)
- if ds_type in ["PDS", "PDSE"]:
+ if ds_type in ["pds", "pdse"]:
ds_full_name = ds_name + "(MEM)"
hosts.all.zos_data_set(name=ds_full_name, state="present", type="member")
cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name)
@@ -1032,9 +1036,10 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype):
results = hosts.all.zos_lineinfile(**params)
for result in results.contacted.values():
assert result.get("changed") == 1
- hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047")
- results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"]))
+ hosts.all.shell(cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" ")
+ results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(ds_full_name))
for result in results.contacted.values():
assert result.get("stdout") == EXPECTED_ENCODING
finally:
remove_ds_environment(ansible_zos_module, ds_name)
\ No newline at end of file
diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py
index 8883ddebc..1e593c3ff 100644
--- a/tests/functional/modules/test_zos_mount_func.py
+++ b/tests/functional/modules/test_zos_mount_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2021, 2022
+# Copyright (c) IBM Corporation 2020, 2024
# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0)
from __future__ import absolute_import, division, print_function
@@ -9,22 +9,9 @@
import tempfile
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
- data_set,
-)
-
-from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
- MissingZOAUImport,
-)
-
from ibm_zos_core.tests.helpers.volumes import Volume_Handler
from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name
-try:
- from zoautil_py import Datasets
-except Exception:
- Datasets = MissingZOAUImport()
-
INITIAL_PRM_MEMBER = """/* Initial file to look like BPXPRM */
/* some settings at the top */
@@ -79,9 +66,6 @@ def create_sourcefile(hosts, volume):
starter, thisfile, str(type(thisfile))
)
)
- # fs_du = data_set.DataSetUtils(thisfile)
- # fs_exists = fs_du.exists()
- # if fs_exists is False:
hosts.all.shell(
cmd="zfsadm define -aggregate "
@@ -105,7 +89,7 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems):
srcfn = create_sourcefile(hosts, volume_1)
try:
mount_result = hosts.all.zos_mount(
- src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted"
+ src=srcfn, path="/pythonx", fs_type="zfs", state="mounted"
)
for result in mount_result.values():
assert result.get("rc") == 0
@@ -115,7 +99,7 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems):
hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="absent",
)
hosts.all.file(path="/pythonx/", state="absent")
@@ -128,10 +112,10 @@ def test_double_mount(ansible_zos_module, volumes_on_systems):
volume_1 = volumes.get_available_vol()
srcfn = create_sourcefile(hosts, volume_1)
try:
- hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted")
+ hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted")
# The duplication here is intentional... want to make sure it is seen
mount_result = hosts.all.zos_mount(
- src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted"
+ src=srcfn, path="/pythonx", fs_type="zfs", state="mounted"
)
for result in mount_result.values():
assert result.get("rc") == 0
@@ -141,7 +125,7 @@ def test_double_mount(ansible_zos_module, volumes_on_systems):
hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="absent",
)
hosts.all.file(path="/pythonx/", state="absent")
@@ -153,9 +137,9 @@ def test_remount(ansible_zos_module, volumes_on_systems):
volume_1 = volumes.get_available_vol()
srcfn = create_sourcefile(hosts, volume_1)
try:
- hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted")
+ hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted")
mount_result = hosts.all.zos_mount(
- src=srcfn, path="/pythonx", fs_type="ZFS", state="remounted"
+ src=srcfn, path="/pythonx", fs_type="zfs", state="remounted"
)
for result in mount_result.values():
assert result.get("rc") == 0
@@ -164,7 +148,7 @@ def test_remount(ansible_zos_module, volumes_on_systems):
hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="absent",
)
hosts.all.file(path="/pythonx/", state="absent")
@@ -196,7 +180,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_
name=dest,
type="pdse",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
)
@@ -212,7 +196,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_
mount_result = hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="mounted",
persistent=dict(data_store=dest_path),
)
@@ -225,7 +209,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_
hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="absent",
)
hosts.all.file(path=tmp_file_filename, state="absent")
@@ -235,7 +219,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_
state="absent",
type="pdse",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
)
@@ -280,7 +264,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst
name=dest,
type="pdse",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
)
@@ -299,7 +283,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst
mount_result = hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="mounted",
persistent=dict(
data_store=dest_path,
@@ -338,14 +322,11 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst
assert srcfn in data
assert "bpxtablecomment - try this" in data
- # fs_du = data_set.DataSetUtils(back_dest_path)
- # fs_exists = fs_du.exists()
- # assert fs_exists
finally:
hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="absent",
)
hosts.all.file(path=tmp_file_filename, state="absent")
@@ -356,7 +337,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst
state="absent",
type="pdse",
space_primary=5,
- space_type="M",
+ space_type="m",
record_format="fba",
record_length=80,
)
@@ -368,7 +349,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems)
srcfn = create_sourcefile(hosts, volume_1)
try:
mount_result = hosts.all.zos_mount(
- src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted"
+ src=srcfn, path="/pythonx", fs_type="zfs", state="mounted"
)
for result in mount_result.values():
assert result.get("rc") == 0
@@ -377,11 +358,11 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems)
finally:
tmphlq = "TMPHLQ"
persist_data_set = get_tmp_ds_name()
- hosts.all.zos_data_set(name=persist_data_set, state="present", type="SEQ")
+ hosts.all.zos_data_set(name=persist_data_set, state="present", type="seq")
unmount_result = hosts.all.zos_mount(
src=srcfn,
path="/pythonx",
- fs_type="ZFS",
+ fs_type="zfs",
state="absent",
tmp_hlq=tmphlq,
persistent=dict(data_store=persist_data_set, backup=True)
diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py
index fd20a6a92..3e97f6026 100644
--- a/tests/functional/modules/test_zos_mvs_raw_func.py
+++ b/tests/functional/modules/test_zos_mvs_raw_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2020, 2022
+# Copyright (c) IBM Corporation 2020, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -393,7 +393,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste
# * because that means data set exists and is VSAM so we can't read it
results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set))
for result in results.contacted.values():
- assert "EDC5041I" in result.get("stderr", "")
+ assert "EDC5041I" or "EDC5049I" in result.get("stderr", "")
finally:
hosts.all.zos_data_set(name=default_data_set, state="absent")
diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py
index c7afab2f9..f8f521a28 100644
--- a/tests/functional/modules/test_zos_operator_action_query_func.py
+++ b/tests/functional/modules/test_zos_operator_action_query_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2023
+# Copyright (c) IBM Corporation 2019, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py
index 6891cffa8..d60d26ec2 100644
--- a/tests/functional/modules/test_zos_operator_func.py
+++ b/tests/functional/modules/test_zos_operator_func.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2019, 2020, 2023
+# Copyright (c) IBM Corporation 2019, 2023
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py
index c0b1fe293..37697da80 100644
--- a/tests/functional/modules/test_zos_unarchive_func.py
+++ b/tests/functional/modules/test_zos_unarchive_func.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2023
+# Copyright (c) IBM Corporation 2023, 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -352,16 +352,16 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module):
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2"]),
]
)
@pytest.mark.parametrize(
"record_length", [80, 120]
)
@pytest.mark.parametrize(
- "record_format", ["FB", "VB",],
+ "record_format", ["fb", "vb",],
)
def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format):
try:
@@ -370,7 +370,6 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec
DATASET = get_tmp_ds_name(3)
HLQ = "ANSIBLE"
# Clean env
- hosts.all.zos_data_set(name=DATASET, state="absent")
hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent")
# Create source data set
hosts.all.zos_data_set(
@@ -379,18 +378,20 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec
state="present",
record_length=record_length,
record_format=record_format,
+ replace=True
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{DATASET}({member})",
type="member",
- state="present"
+ state="present",
+ replace=True
)
# Write content into src that is exactly one record long;
# V and VB records need 4 bytes less because of the RDW
- if record_format in ["V", "VB"]:
+ if record_format in ["v", "vb"]:
test_line = "a" * (record_length - 4)
else:
test_line = "a" * record_length
@@ -403,13 +404,13 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec
format_dict = dict(name=format)
if format == "terse":
- format_dict["format_options"] = dict(terse_pack="SPACK")
+ format_dict["format_options"] = dict(terse_pack="spack")
archive_result = hosts.all.zos_archive(
src=DATASET,
dest=MVS_DEST_ARCHIVE,
format=format_dict,
dest_data_set=dict(name=DATASET,
- type="SEQ",
+ type="seq",
record_format=record_format,
record_length=record_length),
)
@@ -462,16 +463,16 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2"]),
]
)
@pytest.mark.parametrize(
"record_length", [80, 120]
)
@pytest.mark.parametrize(
- "record_format", ["FB", "VB",],
+ "record_format", ["fb", "vb",],
)
def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format):
try:
@@ -480,7 +481,6 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d
DATASET = get_tmp_ds_name(3)
HLQ = "ANSIBLE"
# Clean env
- hosts.all.zos_data_set(name=DATASET, state="absent")
hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent")
# Create source data set
hosts.all.zos_data_set(
@@ -489,18 +489,20 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d
state="present",
record_length=record_length,
record_format=record_format,
+ replace=True
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{DATASET}({member})",
type="member",
- state="present"
+ state="present",
+ replace=True
)
# Write content into src that is exactly one record long;
# V and VB records need 4 bytes less because of the RDW
- if record_format in ["V", "VB"]:
+ if record_format in ["v", "vb"]:
test_line = "a" * (record_length - 4)
else:
test_line = "a" * record_length
@@ -514,7 +516,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d
format_dict = dict(name=format)
format_dict["format_options"] = dict(use_adrdssu=True)
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
archive_result = hosts.all.zos_archive(
src=DATASET,
dest=MVS_DEST_ARCHIVE,
@@ -561,9 +563,9 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set):
@@ -577,7 +579,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format,
n=1,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -595,10 +597,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format,
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
- src=""" "{0}*" """.format(DATASET),
+ src="{0}*".format(DATASET),
dest=MVS_DEST_ARCHIVE,
format=format_dict,
)
@@ -637,9 +639,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format,
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set):
@@ -653,7 +655,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module,
n=2,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -671,10 +673,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module,
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
- src=""" "{0}*" """.format(DATASET),
+ src="{0}*".format(DATASET),
dest=MVS_DEST_ARCHIVE,
format=format_dict,
)
@@ -723,9 +725,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module,
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set):
@@ -739,7 +741,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module,
n=2,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -757,10 +759,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module,
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
- src=""" "{0}*" """.format(DATASET),
+ src="{0}*".format(DATASET),
dest=MVS_DEST_ARCHIVE,
format=format_dict,
)
@@ -805,9 +807,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module,
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set):
@@ -821,7 +823,7 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s
n=2,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -839,10 +841,10 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
archive_result = hosts.all.zos_archive(
- src=""" "{0}*" """.format(DATASET),
+ src="{0}*".format(DATASET),
dest=MVS_DEST_ARCHIVE,
format=format_dict,
)
@@ -882,9 +884,9 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ"),
- dict(dstype="PDS"),
- dict(dstype="PDSE"),
+ dict(dstype="seq"),
+ dict(dstype="pds"),
+ dict(dstype="pdse"),
]
)
@pytest.mark.parametrize(
@@ -908,7 +910,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f
n=1,
type=data_set.get("dstype"))
ds_to_write = target_ds_list
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
target_member_list = []
for ds in target_ds_list:
target_member_list.extend(
@@ -926,10 +928,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f
format_dict = dict(name=format, format_options=dict())
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
format_dict["format_options"].update(use_adrdssu=True)
hosts.all.zos_archive(
- src=""" "{0}*" """.format(DATASET),
+ src="{0}*".format(DATASET),
dest=MVS_DEST_ARCHIVE,
format=format_dict,
)
@@ -970,16 +972,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f
])
@pytest.mark.parametrize(
"data_set", [
- dict(dstype="SEQ", members=[""]),
- dict(dstype="PDS", members=["MEM1", "MEM2"]),
- dict(dstype="PDSE", members=["MEM1", "MEM2"]),
+ dict(dstype="seq", members=[""]),
+ dict(dstype="pds", members=["MEM1", "MEM2"]),
+ dict(dstype="pdse", members=["MEM1", "MEM2"]),
]
)
@pytest.mark.parametrize(
"record_length", [80, 120]
)
@pytest.mark.parametrize(
- "record_format", ["FB", "VB",],
+ "record_format", ["fb", "vb",],
)
def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format):
try:
@@ -1000,7 +1002,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da
record_format=record_format,
)
# Create members if needed
- if data_set.get("dstype") in ["PDS", "PDSE"]:
+ if data_set.get("dstype") in ["pds", "pdse"]:
for member in data_set.get("members"):
hosts.all.zos_data_set(
name=f"{DATASET}({member})",
@@ -1009,7 +1011,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da
)
# Write content into src that is exactly one record long;
# V and VB records need 4 bytes less because of the RDW
- if record_format in ["V", "VB"]:
+ if record_format in ["v", "vb"]:
test_line = "a" * (record_length - 4)
else:
test_line = "a" * record_length
@@ -1023,7 +1025,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da
format_dict = dict(name=format)
format_dict["format_options"] = dict(use_adrdssu=True)
if format == "terse":
- format_dict["format_options"].update(terse_pack="SPACK")
+ format_dict["format_options"].update(terse_pack="spack")
archive_result = hosts.all.zos_archive(
src=DATASET,
dest=MVS_DEST_ARCHIVE,
diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py
index b0ed97d30..bd261f9ed 100644
--- a/tests/helpers/volumes.py
+++ b/tests/helpers/volumes.py
@@ -18,7 +18,7 @@
import pytest
import time
import yaml
-
+from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name
+
class Volume:
""" Volume class represents a volume on the z system, it tracks if the volume name
and status of the volume with respect to the current test session."""
@@ -118,4 +118,44 @@ def read_test_config(path):
if len(config["VOLUMES"]) > 0:
return config["VOLUMES"]
else:
- return None
\ No newline at end of file
+ return None
+
+def get_volumes_with_vvds(ansible_zos_module, volumes_on_system):
+ """
+ Get a list of volumes that contain a VVDS. If no volume has a VVDS,
+ attempt to create one on every available volume.
+ """
+ volumes_with_vvds = find_volumes_with_vvds(ansible_zos_module, volumes_on_system)
+ if len(volumes_with_vvds) == 0 and len(volumes_on_system) > 0:
+ volumes_with_vvds = list()
+ for volume in volumes_on_system:
+ if create_vvds_on_volume(ansible_zos_module, volume):
+ volumes_with_vvds.append(volume)
+ return volumes_with_vvds
+
+def find_volumes_with_vvds(ansible_zos_module, volumes_on_system):
+ """
+ Fetch all VVDS names on the system and return the list of volumes
+ that have a VVDS.
+ """
+ hosts = ansible_zos_module
+ vls_result = hosts.all.shell(cmd="vls SYS1.VVDS.*")
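+ # Every VVDS is named SYS1.VVDS.V<volser>, so a volume serial that
+ # appears anywhere in the vls output has a VVDS on it.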
+ for vls_res in vls_result.contacted.values():
+ vvds_list = vls_res.get("stdout")
+ return [volume for volume in volumes_on_system if volume in vvds_list]
+
+def create_vvds_on_volume(ansible_zos_module, volume):
+ """
+ Create a VVDS on a volume by allocating a small VSAM data set and then
+ deleting it; the VVDS itself remains on the volume.
+ """
+ hosts = ansible_zos_module
+ data_set_name = get_tmp_ds_name(mlq_size=7, llq_size=7)
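+ # dtouch -tesds allocates a small VSAM ESDS; allocating any VSAM data set
+ # on the volume forces the system to build that volume's VVDS.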
+ hosts.all.shell(cmd=f"dtouch -tesds -s10K -V{volume} {data_set_name}")
+ # Remove the data set; the VVDS created for it stays on the volume
+ hosts.all.shell(cmd=f"drm {data_set_name}")
+ # Verify that the VVDS is in place
+ vls_result = hosts.all.shell(cmd=f"vls SYS1.VVDS.V{volume} ")
+ for vls_res in vls_result.contacted.values():
+ if vls_res.get("rc") == 0:
+ return True
+ return False
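+
+# Minimal usage sketch (hypothetical test; assumes the ansible_zos_module and
+# volumes_on_systems pytest fixtures used by the functional tests above):
+#
+#   def test_needs_vvds(ansible_zos_module, volumes_on_systems):
+#       volumes = get_volumes_with_vvds(ansible_zos_module, volumes_on_systems)
+#       assert volumes, "no volume with a VVDS could be found or created"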
diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt
index 8778d80f9..42b415ae6 100644
--- a/tests/sanity/ignore-2.10.txt
+++ b/tests/sanity/ignore-2.10.txt
@@ -27,8 +27,6 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing
plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported
diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt
index 9ceaf3c97..bf118f7b9 100644
--- a/tests/sanity/ignore-2.11.txt
+++ b/tests/sanity/ignore-2.11.txt
@@ -27,10 +27,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing
plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt
index 9ceaf3c97..bf118f7b9 100644
--- a/tests/sanity/ignore-2.12.txt
+++ b/tests/sanity/ignore-2.12.txt
@@ -27,10 +27,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing
plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt
index 70d4764e1..8176aa2bb 100644
--- a/tests/sanity/ignore-2.13.txt
+++ b/tests/sanity/ignore-2.13.txt
@@ -7,10 +7,7 @@ plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc
plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt
index 89cf4db51..c04ae2328 100644
--- a/tests/sanity/ignore-2.14.txt
+++ b/tests/sanity/ignore-2.14.txt
@@ -2,16 +2,9 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und
plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
-plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
-plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt
index 89cf4db51..c04ae2328 100644
--- a/tests/sanity/ignore-2.15.txt
+++ b/tests/sanity/ignore-2.15.txt
@@ -2,16 +2,9 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und
plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
-plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
-plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt
index 89cf4db51..c04ae2328 100644
--- a/tests/sanity/ignore-2.16.txt
+++ b/tests/sanity/ignore-2.16.txt
@@ -2,16 +2,9 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und
plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
-plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin
-plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt
index 992ec6099..62d724706 100644
--- a/tests/sanity/ignore-2.9.txt
+++ b/tests/sanity/ignore-2.9.txt
@@ -26,10 +26,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing
plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin
plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported
-plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments.
-plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed.
plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
-plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user.
plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported
plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
diff --git a/tests/unit/test_zos_backup_restore_unit.py b/tests/unit/test_zos_backup_restore_unit.py
index a751a7599..5920febdb 100644
--- a/tests/unit/test_zos_backup_restore_unit.py
+++ b/tests/unit/test_zos_backup_restore_unit.py
@@ -93,7 +93,7 @@ def assert_args_invalid(zos_backup_restore, arguments):
@pytest.mark.parametrize(
- "space_type", ["K", "M", "G", "TRK", "CYL", "k", "m", "g", "trk", "cyl"]
+ "space_type", ["k", "m", "g", "trk", "cyl"]
)
def test_valid_space_types(zos_backup_restore_mocker, space_type):
valid_args = dict(
diff --git a/tests/unit/test_zos_gather_facts.py b/tests/unit/test_zos_gather_facts.py
index 84b90c186..a7ab4a803 100644
--- a/tests/unit/test_zos_gather_facts.py
+++ b/tests/unit/test_zos_gather_facts.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (c) IBM Corporation 2022
+# Copyright (c) IBM Corporation 2022 - 2024
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -18,7 +18,6 @@
__metaclass__ = type
import pytest
-from mock import call
# Used by some mock modules; should match the import directly below
IMPORT_NAME = "ibm_zos_core.plugins.modules.zos_gather_facts"
@@ -26,30 +25,32 @@
# Tests for zos_father_facts helper functions
test_data = [
- (["ipl"], "zinfo -j -t ipl"),
- (["ipl "], "zinfo -j -t ipl"),
- ([" ipl"], "zinfo -j -t ipl"),
- (["ipl", "sys"], "zinfo -j -t ipl -t sys"),
- (["all"], "zinfo -j -a"),
- (None, "zinfo -j -a"),
- (["ipl", "all", "sys"], "zinfo -j -a"),
+ (["ipl"], ["ipl"]),
+ (["ipl "], ["ipl"]),
+ ([" ipl"], ["ipl"]),
+ (["ipl", "sys"], ["ipl", "sys"]),
+ (["all"], ["all"]),
+ (None, ["all"]),
+ (["ipl", "all", "sys"], ["all"]),
# function does not validate legal vs illegal subsets
- (["asdf"], "zinfo -j -t asdf"),
- ([""], None), # attemtped injection
+ (["asdf"], ["asdf"]),
+ ([""], None),
(["ipl; cat /.bashrc"], None), # attemtped injection
+ # for now, 'all' with some other invalid subset resolves to 'all'
+ (["ipl", "all", "ipl; cat /.ssh/id_rsa"], ["all"]),
]
@pytest.mark.parametrize("args,expected", test_data)
-def test_zos_gather_facts_zinfo_cmd_string_builder(
+def test_zos_gather_facts_zinfo_facts_list_builder(
zos_import_mocker, args, expected):
mocker, importer = zos_import_mocker
zos_gather_facts = importer(IMPORT_NAME)
try:
- result = zos_gather_facts.zinfo_cmd_string_builder(args)
-# # add more logic here as the function evolves.
+ result = zos_gather_facts.zinfo_facts_list_builder(args)
+ # add more logic here as the function evolves.
except Exception:
result = None
assert result == expected